VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@72515

Last change on this file since 72515 was 72514, checked in by vboxsync, 7 years ago

IEM: Don't intercept rdtscp in both iemOp_Grp7_rdtscp and iemCImpl_rdtscp; the latter is the correct one.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 332.6 KB
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 72514 2018-06-11 14:23:25Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
38 }
39
40 /* Ignore operand size here, memory refs are always 16-bit. */
41 IEM_MC_BEGIN(2, 0);
42 IEM_MC_ARG(uint16_t, iEffSeg, 0);
43 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
44 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
45 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
46 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
47 IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
48 IEM_MC_END();
49 return VINF_SUCCESS;
50}
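
/* Editor's note: the `(bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)`
   test above distinguishes the register form (mod == 3) from the memory forms
   of the instruction.  A minimal standalone sketch of the ModR/M field split,
   assuming the usual x86 layout mod[7:6] reg[5:3] rm[2:0] (the helper names
   below are illustrative, not IEM APIs):

   @code
       #include <stdint.h>

       static int      IsRegForm(uint8_t bRm) { return (bRm & 0xc0) == 0xc0; }
       static unsigned RegField(uint8_t bRm)  { return (bRm >> 3) & 7; }
       static unsigned RmField(uint8_t bRm)   { return  bRm       & 7; }

       /* e.g. bRm = 0xc1: mod=3 (register form), reg=0 (/0 -> sldt), rm=1. */
   @endcode
*/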
51
52
53/** Opcode 0x0f 0x00 /1. */
54FNIEMOPRM_DEF(iemOp_Grp6_str)
55{
56 IEMOP_MNEMONIC(str, "str Rv/Mw");
57 IEMOP_HLP_MIN_286();
58 IEMOP_HLP_NO_REAL_OR_V86_MODE();
59
60
61 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
62 {
63 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
64 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
65 }
66
67 /* Ignore operand size here, memory refs are always 16-bit. */
68 IEM_MC_BEGIN(2, 0);
69 IEM_MC_ARG(uint16_t, iEffSeg, 0);
70 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
71 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
72 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
73 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
74 IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
75 IEM_MC_END();
76 return VINF_SUCCESS;
77}
78
79
80/** Opcode 0x0f 0x00 /2. */
81FNIEMOPRM_DEF(iemOp_Grp6_lldt)
82{
83 IEMOP_MNEMONIC(lldt, "lldt Ew");
84 IEMOP_HLP_MIN_286();
85 IEMOP_HLP_NO_REAL_OR_V86_MODE();
86
87 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
88 {
89 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
90 IEM_MC_BEGIN(1, 0);
91 IEM_MC_ARG(uint16_t, u16Sel, 0);
92 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
93 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
94 IEM_MC_END();
95 }
96 else
97 {
98 IEM_MC_BEGIN(1, 1);
99 IEM_MC_ARG(uint16_t, u16Sel, 0);
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
102 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
103 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
104 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
105 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
106 IEM_MC_END();
107 }
108 return VINF_SUCCESS;
109}
110
111
112/** Opcode 0x0f 0x00 /3. */
113FNIEMOPRM_DEF(iemOp_Grp6_ltr)
114{
115 IEMOP_MNEMONIC(ltr, "ltr Ew");
116 IEMOP_HLP_MIN_286();
117 IEMOP_HLP_NO_REAL_OR_V86_MODE();
118
119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
120 {
121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
122 IEM_MC_BEGIN(1, 0);
123 IEM_MC_ARG(uint16_t, u16Sel, 0);
124 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
125 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
126 IEM_MC_END();
127 }
128 else
129 {
130 IEM_MC_BEGIN(1, 1);
131 IEM_MC_ARG(uint16_t, u16Sel, 0);
132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
135 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
136 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
137 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
138 IEM_MC_END();
139 }
140 return VINF_SUCCESS;
141}
142
143
144/** Common worker for Grp6 verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
145FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
146{
147 IEMOP_HLP_MIN_286();
148 IEMOP_HLP_NO_REAL_OR_V86_MODE();
149
150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
151 {
152 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
153 IEM_MC_BEGIN(2, 0);
154 IEM_MC_ARG(uint16_t, u16Sel, 0);
155 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
156 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
157 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
158 IEM_MC_END();
159 }
160 else
161 {
162 IEM_MC_BEGIN(2, 1);
163 IEM_MC_ARG(uint16_t, u16Sel, 0);
164 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
167 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
168 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
169 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
170 IEM_MC_END();
171 }
172 return VINF_SUCCESS;
173}
174
175
176/** Opcode 0x0f 0x00 /4. */
177FNIEMOPRM_DEF(iemOp_Grp6_verr)
178{
179 IEMOP_MNEMONIC(verr, "verr Ew");
180 IEMOP_HLP_MIN_286();
181 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
182}
183
184
185/** Opcode 0x0f 0x00 /5. */
186FNIEMOPRM_DEF(iemOp_Grp6_verw)
187{
188 IEMOP_MNEMONIC(verw, "verw Ew");
189 IEMOP_HLP_MIN_286();
190 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
191}
192
193
194/**
195 * Group 6 jump table.
196 */
197IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
198{
199 iemOp_Grp6_sldt,
200 iemOp_Grp6_str,
201 iemOp_Grp6_lldt,
202 iemOp_Grp6_ltr,
203 iemOp_Grp6_verr,
204 iemOp_Grp6_verw,
205 iemOp_InvalidWithRM,
206 iemOp_InvalidWithRM
207};
208
209/** Opcode 0x0f 0x00. */
210FNIEMOP_DEF(iemOp_Grp6)
211{
212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
213 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
214}
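
/* Editor's note: iemOp_Grp6 above dispatches purely on the ModR/M reg field
   (bits 5:3); the eight table slots correspond to /0../7.  A small hedged
   sketch of the same idea (illustrative types, not IEM code):

   @code
       #include <stdint.h>

       typedef int (*PFNHANDLER)(uint8_t bRm);

       static int Dispatch(PFNHANDLER const apfn[8], uint8_t bRm)
       {
           return apfn[(bRm >> 3) & 7](bRm);  /* reg field selects /0../7 */
       }

       /* e.g. bRm = 0xd0: reg = (0xd0 >> 3) & 7 = 2 -> slot /2 (lldt). */
   @endcode
*/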
215
216
217/** Opcode 0x0f 0x01 /0. */
218FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
219{
220 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_64BIT_OP_SIZE();
223 IEM_MC_BEGIN(2, 1);
224 IEM_MC_ARG(uint8_t, iEffSeg, 0);
225 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
228 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
229 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
230 IEM_MC_END();
231 return VINF_SUCCESS;
232}
233
234
235/** Opcode 0x0f 0x01 /0. */
236FNIEMOP_DEF(iemOp_Grp7_vmcall)
237{
238 IEMOP_MNEMONIC(vmcall, "vmcall");
239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
240
241 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
242 want all hypercalls regardless of the instruction used, and if a
243 hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
244 (NEM/win makes ASSUMPTIONS about this behavior.) */
245 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
246}
247
248
249/** Opcode 0x0f 0x01 /0. */
250FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
251{
252 IEMOP_BITCH_ABOUT_STUB();
253 return IEMOP_RAISE_INVALID_OPCODE();
254}
255
256
257/** Opcode 0x0f 0x01 /0. */
258FNIEMOP_DEF(iemOp_Grp7_vmresume)
259{
260 IEMOP_BITCH_ABOUT_STUB();
261 return IEMOP_RAISE_INVALID_OPCODE();
262}
263
264
265/** Opcode 0x0f 0x01 /0. */
266FNIEMOP_DEF(iemOp_Grp7_vmxoff)
267{
268 IEMOP_BITCH_ABOUT_STUB();
269 return IEMOP_RAISE_INVALID_OPCODE();
270}
271
272
273/** Opcode 0x0f 0x01 /1. */
274FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
275{
276 IEMOP_MNEMONIC(sidt, "sidt Ms");
277 IEMOP_HLP_MIN_286();
278 IEMOP_HLP_64BIT_OP_SIZE();
279 IEM_MC_BEGIN(2, 1);
280 IEM_MC_ARG(uint8_t, iEffSeg, 0);
281 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
284 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
285 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
286 IEM_MC_END();
287 return VINF_SUCCESS;
288}
289
290
291/** Opcode 0x0f 0x01 /1. */
292FNIEMOP_DEF(iemOp_Grp7_monitor)
293{
294 IEMOP_MNEMONIC(monitor, "monitor");
295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
296 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
297}
298
299
300/** Opcode 0x0f 0x01 /1. */
301FNIEMOP_DEF(iemOp_Grp7_mwait)
302{
303 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
305 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
306}
307
308
309/** Opcode 0x0f 0x01 /2. */
310FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
311{
312 IEMOP_MNEMONIC(lgdt, "lgdt");
313 IEMOP_HLP_64BIT_OP_SIZE();
314 IEM_MC_BEGIN(3, 1);
315 IEM_MC_ARG(uint8_t, iEffSeg, 0);
316 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
317 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
320 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
321 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
322 IEM_MC_END();
323 return VINF_SUCCESS;
324}
325
326
327/** Opcode 0x0f 0x01 0xd0. */
328FNIEMOP_DEF(iemOp_Grp7_xgetbv)
329{
330 IEMOP_MNEMONIC(xgetbv, "xgetbv");
331 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
332 {
333 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
334 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
335 }
336 return IEMOP_RAISE_INVALID_OPCODE();
337}
338
339
340/** Opcode 0x0f 0x01 0xd1. */
341FNIEMOP_DEF(iemOp_Grp7_xsetbv)
342{
343 IEMOP_MNEMONIC(xsetbv, "xsetbv");
344 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
345 {
346 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
347 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
348 }
349 return IEMOP_RAISE_INVALID_OPCODE();
350}
351
352
353/** Opcode 0x0f 0x01 /3. */
354FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
355{
356 IEMOP_MNEMONIC(lidt, "lidt");
357 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
358 ? IEMMODE_64BIT
359 : pVCpu->iem.s.enmEffOpSize;
360 IEM_MC_BEGIN(3, 1);
361 IEM_MC_ARG(uint8_t, iEffSeg, 0);
362 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
363 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
366 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
367 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
368 IEM_MC_END();
369 return VINF_SUCCESS;
370}
371
372
373/** Opcode 0x0f 0x01 0xd8. */
374#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
375FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
376{
377 IEMOP_MNEMONIC(vmrun, "vmrun");
378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
379 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
380}
381#else
382FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
383#endif
384
385/** Opcode 0x0f 0x01 0xd9. */
386FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
387{
388 IEMOP_MNEMONIC(vmmcall, "vmmcall");
389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
390
391 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
392 want all hypercalls regardless of the instruction used, and if a
393 hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
394 (NEM/win makes ASSUMPTIONS about this behavior.) */
395 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
396}
397
398/** Opcode 0x0f 0x01 0xda. */
399#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
400FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
401{
402 IEMOP_MNEMONIC(vmload, "vmload");
403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
404 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
405}
406#else
407FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
408#endif
409
410
411/** Opcode 0x0f 0x01 0xdb. */
412#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
413FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
414{
415 IEMOP_MNEMONIC(vmsave, "vmsave");
416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
417 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
418}
419#else
420FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
421#endif
422
423
424/** Opcode 0x0f 0x01 0xdc. */
425#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
426FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
427{
428 IEMOP_MNEMONIC(stgi, "stgi");
429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
430 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
431}
432#else
433FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
434#endif
435
436
437/** Opcode 0x0f 0x01 0xdd. */
438#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
439FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
440{
441 IEMOP_MNEMONIC(clgi, "clgi");
442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
443 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
444}
445#else
446FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
447#endif
448
449
450/** Opcode 0x0f 0x01 0xdf. */
451#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
452FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
453{
454 IEMOP_MNEMONIC(invlpga, "invlpga");
455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
456 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
457}
458#else
459FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
460#endif
461
462
463/** Opcode 0x0f 0x01 0xde. */
464#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
465FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
466{
467 IEMOP_MNEMONIC(skinit, "skinit");
468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
469 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
470}
471#else
472FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
473#endif
474
475
476/** Opcode 0x0f 0x01 /4. */
477FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
478{
479 IEMOP_MNEMONIC(smsw, "smsw");
480 IEMOP_HLP_MIN_286();
481 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
482 {
483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
484 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
485 }
486
487 /* Ignore operand size here, memory refs are always 16-bit. */
488 IEM_MC_BEGIN(2, 0);
489 IEM_MC_ARG(uint16_t, iEffSeg, 0);
490 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
493 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
494 IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
495 IEM_MC_END();
496 return VINF_SUCCESS;
497}
498
499
500/** Opcode 0x0f 0x01 /6. */
501FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
502{
503 /* The operand size is effectively ignored, all is 16-bit and only the
504 lower 4 bits (PE, MP, EM, TS) are used. */
505 IEMOP_MNEMONIC(lmsw, "lmsw");
506 IEMOP_HLP_MIN_286();
507 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
508 {
509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
510 IEM_MC_BEGIN(1, 0);
511 IEM_MC_ARG(uint16_t, u16Tmp, 0);
512 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
513 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
514 IEM_MC_END();
515 }
516 else
517 {
518 IEM_MC_BEGIN(1, 1);
519 IEM_MC_ARG(uint16_t, u16Tmp, 0);
520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
523 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
524 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
525 IEM_MC_END();
526 }
527 return VINF_SUCCESS;
528}
529
530
531/** Opcode 0x0f 0x01 /7. */
532FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
533{
534 IEMOP_MNEMONIC(invlpg, "invlpg");
535 IEMOP_HLP_MIN_486();
536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
537 IEM_MC_BEGIN(1, 1);
538 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
540 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
541 IEM_MC_END();
542 return VINF_SUCCESS;
543}
544
545
546/** Opcode 0x0f 0x01 /7. */
547FNIEMOP_DEF(iemOp_Grp7_swapgs)
548{
549 IEMOP_MNEMONIC(swapgs, "swapgs");
550 IEMOP_HLP_ONLY_64BIT();
551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
552 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
553}
554
555
556/** Opcode 0x0f 0x01 /7. */
557FNIEMOP_DEF(iemOp_Grp7_rdtscp)
558{
559 IEMOP_MNEMONIC(rdtscp, "rdtscp");
560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
561 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
562}
563
564
565/**
566 * Group 7 jump table, memory variant.
567 */
568IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
569{
570 iemOp_Grp7_sgdt,
571 iemOp_Grp7_sidt,
572 iemOp_Grp7_lgdt,
573 iemOp_Grp7_lidt,
574 iemOp_Grp7_smsw,
575 iemOp_InvalidWithRM,
576 iemOp_Grp7_lmsw,
577 iemOp_Grp7_invlpg
578};
579
580
581/** Opcode 0x0f 0x01. */
582FNIEMOP_DEF(iemOp_Grp7)
583{
584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
585 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
586 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
587
588 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
589 {
590 case 0:
591 switch (bRm & X86_MODRM_RM_MASK)
592 {
593 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
594 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
595 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
596 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
597 }
598 return IEMOP_RAISE_INVALID_OPCODE();
599
600 case 1:
601 switch (bRm & X86_MODRM_RM_MASK)
602 {
603 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
604 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
605 }
606 return IEMOP_RAISE_INVALID_OPCODE();
607
608 case 2:
609 switch (bRm & X86_MODRM_RM_MASK)
610 {
611 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
612 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
613 }
614 return IEMOP_RAISE_INVALID_OPCODE();
615
616 case 3:
617 switch (bRm & X86_MODRM_RM_MASK)
618 {
619 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
620 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
621 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
622 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
623 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
624 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
625 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
626 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
627 IEM_NOT_REACHED_DEFAULT_CASE_RET();
628 }
629
630 case 4:
631 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
632
633 case 5:
634 return IEMOP_RAISE_INVALID_OPCODE();
635
636 case 6:
637 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
638
639 case 7:
640 switch (bRm & X86_MODRM_RM_MASK)
641 {
642 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
643 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
644 }
645 return IEMOP_RAISE_INVALID_OPCODE();
646
647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
648 }
649}
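
/* Editor's note: Grp7 uses a two-level decode.  Memory forms (mod != 3) go
   straight to g_apfnGroup7Mem by reg field; register forms (mod == 3)
   additionally switch on the rm field, which is how the single-byte
   encodings like 0F 01 D0 (xgetbv) fall out.  Worked examples (arithmetic
   only, no new APIs assumed):

   @code
       /* 0F 01 D0: bRm = 0xd0 -> mod=3, reg=2, rm=0 -> xgetbv.     */
       /* 0F 01 F8: bRm = 0xf8 -> mod=3, reg=7, rm=0 -> swapgs.     */
       /* 0F 01 F9: bRm = 0xf9 -> mod=3, reg=7, rm=1 -> rdtscp.     */
       /* 0F 01 16: bRm = 0x16 -> mod=0, reg=2       -> lgdt [mem]. */
   @endcode
*/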
650
651/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
652FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
653{
654 IEMOP_HLP_NO_REAL_OR_V86_MODE();
655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
656
657 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
658 {
659 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
660 switch (pVCpu->iem.s.enmEffOpSize)
661 {
662 case IEMMODE_16BIT:
663 {
664 IEM_MC_BEGIN(3, 0);
665 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
666 IEM_MC_ARG(uint16_t, u16Sel, 1);
667 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
668
669 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
670 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
671 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
672
673 IEM_MC_END();
674 return VINF_SUCCESS;
675 }
676
677 case IEMMODE_32BIT:
678 case IEMMODE_64BIT:
679 {
680 IEM_MC_BEGIN(3, 0);
681 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
682 IEM_MC_ARG(uint16_t, u16Sel, 1);
683 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
684
685 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
686 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
687 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
688
689 IEM_MC_END();
690 return VINF_SUCCESS;
691 }
692
693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
694 }
695 }
696 else
697 {
698 switch (pVCpu->iem.s.enmEffOpSize)
699 {
700 case IEMMODE_16BIT:
701 {
702 IEM_MC_BEGIN(3, 1);
703 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
704 IEM_MC_ARG(uint16_t, u16Sel, 1);
705 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
707
708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
709 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
710
711 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
712 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
713 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
714
715 IEM_MC_END();
716 return VINF_SUCCESS;
717 }
718
719 case IEMMODE_32BIT:
720 case IEMMODE_64BIT:
721 {
722 IEM_MC_BEGIN(3, 1);
723 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
724 IEM_MC_ARG(uint16_t, u16Sel, 1);
725 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
727
728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
729 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
730/** @todo testcase: make sure it's a 16-bit read. */
731
732 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
733 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
734 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
735
736 IEM_MC_END();
737 return VINF_SUCCESS;
738 }
739
740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
741 }
742 }
743}
744
745
746
747/** Opcode 0x0f 0x02. */
748FNIEMOP_DEF(iemOp_lar_Gv_Ew)
749{
750 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
751 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
752}
753
754
755/** Opcode 0x0f 0x03. */
756FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
757{
758 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
759 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
760}
761
762
763/** Opcode 0x0f 0x05. */
764FNIEMOP_DEF(iemOp_syscall)
765{
766 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
768 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
769}
770
771
772/** Opcode 0x0f 0x06. */
773FNIEMOP_DEF(iemOp_clts)
774{
775 IEMOP_MNEMONIC(clts, "clts");
776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
777 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
778}
779
780
781/** Opcode 0x0f 0x07. */
782FNIEMOP_DEF(iemOp_sysret)
783{
784 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
786 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
787}
788
789
790/** Opcode 0x0f 0x08. */
791FNIEMOP_DEF(iemOp_invd)
792{
793 IEMOP_MNEMONIC(invd, "invd");
794#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
795 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
796 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
797#else
798 RT_NOREF_PV(pVCpu);
799#endif
800 /** @todo implement invd for the regular case (above only handles nested SVM
801 * exits). */
802 IEMOP_BITCH_ABOUT_STUB();
803 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
804}
805
807
808
809/** Opcode 0x0f 0x09. */
810FNIEMOP_DEF(iemOp_wbinvd)
811{
812 IEMOP_MNEMONIC(wbinvd, "wbinvd");
813 IEMOP_HLP_MIN_486();
814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
815 IEM_MC_BEGIN(0, 0);
816 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
817 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
818 IEM_MC_ADVANCE_RIP();
819 IEM_MC_END();
820 return VINF_SUCCESS; /* ignore for now */
821}
822
823
824/** Opcode 0x0f 0x0b. */
825FNIEMOP_DEF(iemOp_ud2)
826{
827 IEMOP_MNEMONIC(ud2, "ud2");
828 return IEMOP_RAISE_INVALID_OPCODE();
829}
830
831/** Opcode 0x0f 0x0d. */
832FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
833{
834 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
835 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
836 {
837 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
838 return IEMOP_RAISE_INVALID_OPCODE();
839 }
840
841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
842 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
843 {
844 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
845 return IEMOP_RAISE_INVALID_OPCODE();
846 }
847
848 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
849 {
850 case 2: /* Aliased to /0 for the time being. */
851 case 4: /* Aliased to /0 for the time being. */
852 case 5: /* Aliased to /0 for the time being. */
853 case 6: /* Aliased to /0 for the time being. */
854 case 7: /* Aliased to /0 for the time being. */
855 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
856 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
857 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
859 }
860
861 IEM_MC_BEGIN(0, 1);
862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
865 /* Currently a NOP. */
866 NOREF(GCPtrEffSrc);
867 IEM_MC_ADVANCE_RIP();
868 IEM_MC_END();
869 return VINF_SUCCESS;
870}
871
872
873/** Opcode 0x0f 0x0e. */
874FNIEMOP_DEF(iemOp_femms)
875{
876 IEMOP_MNEMONIC(femms, "femms");
877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
878
879 IEM_MC_BEGIN(0,0);
880 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
881 IEM_MC_MAYBE_RAISE_FPU_XCPT();
882 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
883 IEM_MC_FPU_FROM_MMX_MODE();
884 IEM_MC_ADVANCE_RIP();
885 IEM_MC_END();
886 return VINF_SUCCESS;
887}
888
889
890/** Opcode 0x0f 0x0f. */
891FNIEMOP_DEF(iemOp_3Dnow)
892{
893 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
894 {
895 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
896 return IEMOP_RAISE_INVALID_OPCODE();
897 }
898
899#ifdef IEM_WITH_3DNOW
900 /* This is pretty sparse, use switch instead of table. */
901 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
902 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
903#else
904 IEMOP_BITCH_ABOUT_STUB();
905 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
906#endif
907}
908
909
910/**
911 * @opcode 0x10
912 * @oppfx none
913 * @opcpuid sse
914 * @opgroup og_sse_simdfp_datamove
915 * @opxcpttype 4UA
916 * @optest op1=1 op2=2 -> op1=2
917 * @optest op1=0 op2=-22 -> op1=-22
918 */
919FNIEMOP_DEF(iemOp_movups_Vps_Wps)
920{
921 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
923 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
924 {
925 /*
926 * Register, register.
927 */
928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
929 IEM_MC_BEGIN(0, 0);
930 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
931 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
932 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
933 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
934 IEM_MC_ADVANCE_RIP();
935 IEM_MC_END();
936 }
937 else
938 {
939 /*
940 * Register, memory.
941 */
942 IEM_MC_BEGIN(0, 2);
943 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
945
946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
948 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
949 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
950
951 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
952 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
953
954 IEM_MC_ADVANCE_RIP();
955 IEM_MC_END();
956 }
957 return VINF_SUCCESS;
958
959}
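
/* Editor's note: the `| pVCpu->iem.s.uRexReg` / `| pVCpu->iem.s.uRexB` terms
   above extend the 3-bit ModR/M register indices with the REX.R / REX.B bits
   so XMM8..XMM15 are reachable in 64-bit mode; the fields are kept
   pre-shifted (0 or 8) so a plain OR produces the final index.  A hedged
   standalone sketch (fRexR/fRexB are illustrative flags):

   @code
       #include <stdint.h>

       static unsigned XregDst(uint8_t bRm, int fRexR)
       { return ((bRm >> 3) & 7) | (fRexR ? 8 : 0); }   /* like uRexReg */

       static unsigned XregSrc(uint8_t bRm, int fRexB)
       { return ( bRm       & 7) | (fRexB ? 8 : 0); }   /* like uRexB   */
   @endcode
*/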
960
961
962/**
963 * @opcode 0x10
964 * @oppfx 0x66
965 * @opcpuid sse2
966 * @opgroup og_sse2_pcksclr_datamove
967 * @opxcpttype 4UA
968 * @optest op1=1 op2=2 -> op1=2
969 * @optest op1=0 op2=-42 -> op1=-42
970 */
971FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
972{
973 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
976 {
977 /*
978 * Register, register.
979 */
980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
981 IEM_MC_BEGIN(0, 0);
982 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
983 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
984 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
985 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
986 IEM_MC_ADVANCE_RIP();
987 IEM_MC_END();
988 }
989 else
990 {
991 /*
992 * Register, memory.
993 */
994 IEM_MC_BEGIN(0, 2);
995 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
997
998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1000 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1001 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1002
1003 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1004 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1005
1006 IEM_MC_ADVANCE_RIP();
1007 IEM_MC_END();
1008 }
1009 return VINF_SUCCESS;
1010}
1011
1012
1013/**
1014 * @opcode 0x10
1015 * @oppfx 0xf3
1016 * @opcpuid sse
1017 * @opgroup og_sse_simdfp_datamove
1018 * @opxcpttype 5
1019 * @optest op1=1 op2=2 -> op1=2
1020 * @optest op1=0 op2=-22 -> op1=-22
1021 */
1022FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1023{
1024 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1025 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1026 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1027 {
1028 /*
1029 * Register, register.
1030 */
1031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1032 IEM_MC_BEGIN(0, 1);
1033 IEM_MC_LOCAL(uint32_t, uSrc);
1034
1035 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1036 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1037 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1038 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1039
1040 IEM_MC_ADVANCE_RIP();
1041 IEM_MC_END();
1042 }
1043 else
1044 {
1045 /*
1046 * Register, memory.
1047 */
1048 IEM_MC_BEGIN(0, 2);
1049 IEM_MC_LOCAL(uint32_t, uSrc);
1050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1051
1052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1054 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1055 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1056
1057 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1058 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1059
1060 IEM_MC_ADVANCE_RIP();
1061 IEM_MC_END();
1062 }
1063 return VINF_SUCCESS;
1064}
1065
1066
1067/**
1068 * @opcode 0x10
1069 * @oppfx 0xf2
1070 * @opcpuid sse2
1071 * @opgroup og_sse2_pcksclr_datamove
1072 * @opxcpttype 5
1073 * @optest op1=1 op2=2 -> op1=2
1074 * @optest op1=0 op2=-42 -> op1=-42
1075 */
1076FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1077{
1078 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1081 {
1082 /*
1083 * Register, register.
1084 */
1085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1086 IEM_MC_BEGIN(0, 1);
1087 IEM_MC_LOCAL(uint64_t, uSrc);
1088
1089 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1090 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1091 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1092 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1093
1094 IEM_MC_ADVANCE_RIP();
1095 IEM_MC_END();
1096 }
1097 else
1098 {
1099 /*
1100 * Register, memory.
1101 */
1102 IEM_MC_BEGIN(0, 2);
1103 IEM_MC_LOCAL(uint64_t, uSrc);
1104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1105
1106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1108 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1109 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1110
1111 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1112 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1113
1114 IEM_MC_ADVANCE_RIP();
1115 IEM_MC_END();
1116 }
1117 return VINF_SUCCESS;
1118}
1119
1120
1121/**
1122 * @opcode 0x11
1123 * @oppfx none
1124 * @opcpuid sse
1125 * @opgroup og_sse_simdfp_datamove
1126 * @opxcpttype 4UA
1127 * @optest op1=1 op2=2 -> op1=2
1128 * @optest op1=0 op2=-42 -> op1=-42
1129 */
1130FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1131{
1132 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1134 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1135 {
1136 /*
1137 * Register, register.
1138 */
1139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1140 IEM_MC_BEGIN(0, 0);
1141 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1142 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1143 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1144 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1145 IEM_MC_ADVANCE_RIP();
1146 IEM_MC_END();
1147 }
1148 else
1149 {
1150 /*
1151 * Memory, register.
1152 */
1153 IEM_MC_BEGIN(0, 2);
1154 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1156
1157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1159 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1160 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1161
1162 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1163 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1164
1165 IEM_MC_ADVANCE_RIP();
1166 IEM_MC_END();
1167 }
1168 return VINF_SUCCESS;
1169}
1170
1171
1172/**
1173 * @opcode 0x11
1174 * @oppfx 0x66
1175 * @opcpuid sse2
1176 * @opgroup og_sse2_pcksclr_datamove
1177 * @opxcpttype 4UA
1178 * @optest op1=1 op2=2 -> op1=2
1179 * @optest op1=0 op2=-42 -> op1=-42
1180 */
1181FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1182{
1183 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1185 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1186 {
1187 /*
1188 * Register, register.
1189 */
1190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1191 IEM_MC_BEGIN(0, 0);
1192 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1193 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1194 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1195 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1196 IEM_MC_ADVANCE_RIP();
1197 IEM_MC_END();
1198 }
1199 else
1200 {
1201 /*
1202 * Memory, register.
1203 */
1204 IEM_MC_BEGIN(0, 2);
1205 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1207
1208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1210 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1211 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1212
1213 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1214 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1215
1216 IEM_MC_ADVANCE_RIP();
1217 IEM_MC_END();
1218 }
1219 return VINF_SUCCESS;
1220}
1221
1222
1223/**
1224 * @opcode 0x11
1225 * @oppfx 0xf3
1226 * @opcpuid sse
1227 * @opgroup og_sse_simdfp_datamove
1228 * @opxcpttype 5
1229 * @optest op1=1 op2=2 -> op1=2
1230 * @optest op1=0 op2=-22 -> op1=-22
1231 */
1232FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1233{
1234 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1236 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1237 {
1238 /*
1239 * Register, register.
1240 */
1241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1242 IEM_MC_BEGIN(0, 1);
1243 IEM_MC_LOCAL(uint32_t, uSrc);
1244
1245 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1246 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1247 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1248 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1249
1250 IEM_MC_ADVANCE_RIP();
1251 IEM_MC_END();
1252 }
1253 else
1254 {
1255 /*
1256 * Memory, register.
1257 */
1258 IEM_MC_BEGIN(0, 2);
1259 IEM_MC_LOCAL(uint32_t, uSrc);
1260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1261
1262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1264 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1265 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1266
1267 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1268 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1269
1270 IEM_MC_ADVANCE_RIP();
1271 IEM_MC_END();
1272 }
1273 return VINF_SUCCESS;
1274}
1275
1276
1277/**
1278 * @opcode 0x11
1279 * @oppfx 0xf2
1280 * @opcpuid sse2
1281 * @opgroup og_sse2_pcksclr_datamove
1282 * @opxcpttype 5
1283 * @optest op1=1 op2=2 -> op1=2
1284 * @optest op1=0 op2=-42 -> op1=-42
1285 */
1286FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1287{
1288 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1290 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1291 {
1292 /*
1293 * Register, register.
1294 */
1295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1296 IEM_MC_BEGIN(0, 1);
1297 IEM_MC_LOCAL(uint64_t, uSrc);
1298
1299 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1300 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1301 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1302 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1303
1304 IEM_MC_ADVANCE_RIP();
1305 IEM_MC_END();
1306 }
1307 else
1308 {
1309 /*
1310 * Memory, register.
1311 */
1312 IEM_MC_BEGIN(0, 2);
1313 IEM_MC_LOCAL(uint64_t, uSrc);
1314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1315
1316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1318 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1319 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1320
1321 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1322 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1323
1324 IEM_MC_ADVANCE_RIP();
1325 IEM_MC_END();
1326 }
1327 return VINF_SUCCESS;
1328}
1329
1330
1331FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1332{
1333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1334 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1335 {
1336 /**
1337 * @opcode 0x12
1338 * @opcodesub 11 mr/reg
1339 * @oppfx none
1340 * @opcpuid sse
1341 * @opgroup og_sse_simdfp_datamove
1342 * @opxcpttype 5
1343 * @optest op1=1 op2=2 -> op1=2
1344 * @optest op1=0 op2=-42 -> op1=-42
1345 */
1346 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1347
1348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1349 IEM_MC_BEGIN(0, 1);
1350 IEM_MC_LOCAL(uint64_t, uSrc);
1351
1352 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1353 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1354 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1355 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1356
1357 IEM_MC_ADVANCE_RIP();
1358 IEM_MC_END();
1359 }
1360 else
1361 {
1362 /**
1363 * @opdone
1364 * @opcode 0x12
1365 * @opcodesub !11 mr/reg
1366 * @oppfx none
1367 * @opcpuid sse
1368 * @opgroup og_sse_simdfp_datamove
1369 * @opxcpttype 5
1370 * @optest op1=1 op2=2 -> op1=2
1371 * @optest op1=0 op2=-42 -> op1=-42
1372 * @opfunction iemOp_movlps_Vq_Mq__movhlps
1373 */
1374 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1375
1376 IEM_MC_BEGIN(0, 2);
1377 IEM_MC_LOCAL(uint64_t, uSrc);
1378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1379
1380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1382 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1383 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1384
1385 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1386 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1387
1388 IEM_MC_ADVANCE_RIP();
1389 IEM_MC_END();
1390 }
1391 return VINF_SUCCESS;
1392}
1393
1394
1395/**
1396 * @opcode 0x12
1397 * @opcodesub !11 mr/reg
1398 * @oppfx 0x66
1399 * @opcpuid sse2
1400 * @opgroup og_sse2_pcksclr_datamove
1401 * @opxcpttype 5
1402 * @optest op1=1 op2=2 -> op1=2
1403 * @optest op1=0 op2=-42 -> op1=-42
1404 */
1405FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1406{
1407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1408 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1409 {
1410 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1411
1412 IEM_MC_BEGIN(0, 2);
1413 IEM_MC_LOCAL(uint64_t, uSrc);
1414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1415
1416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1418 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1419 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1420
1421 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1422 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1423
1424 IEM_MC_ADVANCE_RIP();
1425 IEM_MC_END();
1426 return VINF_SUCCESS;
1427 }
1428
1429 /**
1430 * @opdone
1431 * @opmnemonic ud660f12m3
1432 * @opcode 0x12
1433 * @opcodesub 11 mr/reg
1434 * @oppfx 0x66
1435 * @opunused immediate
1436 * @opcpuid sse
1437 * @optest ->
1438 */
1439 return IEMOP_RAISE_INVALID_OPCODE();
1440}
1441
1442
1443/**
1444 * @opcode 0x12
1445 * @oppfx 0xf3
1446 * @opcpuid sse3
1447 * @opgroup og_sse3_pcksclr_datamove
1448 * @opxcpttype 4
1449 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1450 * op1=0x00000002000000020000000100000001
1451 */
1452FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1453{
1454 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1457 {
1458 /*
1459 * Register, register.
1460 */
1461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1462 IEM_MC_BEGIN(2, 0);
1463 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1464 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1465
1466 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1467 IEM_MC_PREPARE_SSE_USAGE();
1468
1469 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1470 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1471 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1472
1473 IEM_MC_ADVANCE_RIP();
1474 IEM_MC_END();
1475 }
1476 else
1477 {
1478 /*
1479 * Register, memory.
1480 */
1481 IEM_MC_BEGIN(2, 2);
1482 IEM_MC_LOCAL(RTUINT128U, uSrc);
1483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1484 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1485 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1486
1487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1489 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1490 IEM_MC_PREPARE_SSE_USAGE();
1491
1492 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1493 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1494 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1495
1496 IEM_MC_ADVANCE_RIP();
1497 IEM_MC_END();
1498 }
1499 return VINF_SUCCESS;
1500}
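
/* Editor's note: movsldup duplicates the even (low) dword of each qword
   lane, i.e. dst = { s0, s0, s2, s2 }, which is exactly what the @optest
   above encodes; movshdup (0xf3 0x0f 0x16 below) is the odd-dword twin and
   movddup duplicates the low qword.  A minimal reference sketch:

   @code
       #include <stdint.h>

       static void MovSlDup(uint32_t aDst[4], uint32_t const aSrc[4])
       {
           aDst[0] = aDst[1] = aSrc[0];   /* low qword: duplicate dword 0  */
           aDst[2] = aDst[3] = aSrc[2];   /* high qword: duplicate dword 2 */
       }
   @endcode
*/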
1501
1502
1503/**
1504 * @opcode 0x12
1505 * @oppfx 0xf2
1506 * @opcpuid sse3
1507 * @opgroup og_sse3_pcksclr_datamove
1508 * @opxcpttype 5
1509 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1510 * op1=0x22222222111111112222222211111111
1511 */
1512FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1513{
1514 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1516 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1517 {
1518 /*
1519 * Register, register.
1520 */
1521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1522 IEM_MC_BEGIN(2, 0);
1523 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1524 IEM_MC_ARG(uint64_t, uSrc, 1);
1525
1526 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1527 IEM_MC_PREPARE_SSE_USAGE();
1528
1529 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1530 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1531 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1532
1533 IEM_MC_ADVANCE_RIP();
1534 IEM_MC_END();
1535 }
1536 else
1537 {
1538 /*
1539 * Register, memory.
1540 */
1541 IEM_MC_BEGIN(2, 2);
1542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1543 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1544 IEM_MC_ARG(uint64_t, uSrc, 1);
1545
1546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1548 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1549 IEM_MC_PREPARE_SSE_USAGE();
1550
1551 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1552 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1553 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1554
1555 IEM_MC_ADVANCE_RIP();
1556 IEM_MC_END();
1557 }
1558 return VINF_SUCCESS;
1559}
1560
1561
1562/**
1563 * @opcode 0x13
1564 * @opcodesub !11 mr/reg
1565 * @oppfx none
1566 * @opcpuid sse
1567 * @opgroup og_sse_simdfp_datamove
1568 * @opxcpttype 5
1569 * @optest op1=1 op2=2 -> op1=2
1570 * @optest op1=0 op2=-42 -> op1=-42
1571 */
1572FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1573{
1574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1575 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1576 {
1577 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1578
1579 IEM_MC_BEGIN(0, 2);
1580 IEM_MC_LOCAL(uint64_t, uSrc);
1581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1582
1583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1585 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1587
1588 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1589 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1590
1591 IEM_MC_ADVANCE_RIP();
1592 IEM_MC_END();
1593 return VINF_SUCCESS;
1594 }
1595
1596 /**
1597 * @opdone
1598 * @opmnemonic ud0f13m3
1599 * @opcode 0x13
1600 * @opcodesub 11 mr/reg
1601 * @oppfx none
1602 * @opunused immediate
1603 * @opcpuid sse
1604 * @optest ->
1605 */
1606 return IEMOP_RAISE_INVALID_OPCODE();
1607}
1608
1609
1610/**
1611 * @opcode 0x13
1612 * @opcodesub !11 mr/reg
1613 * @oppfx 0x66
1614 * @opcpuid sse2
1615 * @opgroup og_sse2_pcksclr_datamove
1616 * @opxcpttype 5
1617 * @optest op1=1 op2=2 -> op1=2
1618 * @optest op1=0 op2=-42 -> op1=-42
1619 */
1620FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1621{
1622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1623 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1624 {
1625 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1626 IEM_MC_BEGIN(0, 2);
1627 IEM_MC_LOCAL(uint64_t, uSrc);
1628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1629
1630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1632 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1633 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1634
1635 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1636 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1637
1638 IEM_MC_ADVANCE_RIP();
1639 IEM_MC_END();
1640 return VINF_SUCCESS;
1641 }
1642
1643 /**
1644 * @opdone
1645 * @opmnemonic ud660f13m3
1646 * @opcode 0x13
1647 * @opcodesub 11 mr/reg
1648 * @oppfx 0x66
1649 * @opunused immediate
1650 * @opcpuid sse
1651 * @optest ->
1652 */
1653 return IEMOP_RAISE_INVALID_OPCODE();
1654}
1655
1656
1657/**
1658 * @opmnemonic udf30f13
1659 * @opcode 0x13
1660 * @oppfx 0xf3
1661 * @opunused intel-modrm
1662 * @opcpuid sse
1663 * @optest ->
1664 * @opdone
1665 */
1666
1667/**
1668 * @opmnemonic udf20f13
1669 * @opcode 0x13
1670 * @oppfx 0xf2
1671 * @opunused intel-modrm
1672 * @opcpuid sse
1673 * @optest ->
1674 * @opdone
1675 */
1676
1677/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
1678FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1679/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1680FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1681
1682/**
1683 * @opdone
1684 * @opmnemonic udf30f14
1685 * @opcode 0x14
1686 * @oppfx 0xf3
1687 * @opunused intel-modrm
1688 * @opcpuid sse
1689 * @optest ->
1690 * @opdone
1691 */
1692
1693/**
1694 * @opmnemonic udf20f14
1695 * @opcode 0x14
1696 * @oppfx 0xf2
1697 * @opunused intel-modrm
1698 * @opcpuid sse
1699 * @optest ->
1700 * @opdone
1701 */
1702
1703/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1704FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1705/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1706FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1707/* Opcode 0xf3 0x0f 0x15 - invalid */
1708/* Opcode 0xf2 0x0f 0x15 - invalid */
1709
1710/**
1711 * @opdone
1712 * @opmnemonic udf30f15
1713 * @opcode 0x15
1714 * @oppfx 0xf3
1715 * @opunused intel-modrm
1716 * @opcpuid sse
1717 * @optest ->
1718 * @opdone
1719 */
1720
1721/**
1722 * @opmnemonic udf20f15
1723 * @opcode 0x15
1724 * @oppfx 0xf2
1725 * @opunused intel-modrm
1726 * @opcpuid sse
1727 * @optest ->
1728 * @opdone
1729 */
1730
1731FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1732{
1733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1734 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1735 {
1736 /**
1737 * @opcode 0x16
1738 * @opcodesub 11 mr/reg
1739 * @oppfx none
1740 * @opcpuid sse
1741 * @opgroup og_sse_simdfp_datamove
1742 * @opxcpttype 5
1743 * @optest op1=1 op2=2 -> op1=2
1744 * @optest op1=0 op2=-42 -> op1=-42
1745 */
1746 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1747
1748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1749 IEM_MC_BEGIN(0, 1);
1750 IEM_MC_LOCAL(uint64_t, uSrc);
1751
1752 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1753 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1754 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1755 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1756
1757 IEM_MC_ADVANCE_RIP();
1758 IEM_MC_END();
1759 }
1760 else
1761 {
1762 /**
1763 * @opdone
1764 * @opcode 0x16
1765 * @opcodesub !11 mr/reg
1766 * @oppfx none
1767 * @opcpuid sse
1768 * @opgroup og_sse_simdfp_datamove
1769 * @opxcpttype 5
1770 * @optest op1=1 op2=2 -> op1=2
1771 * @optest op1=0 op2=-42 -> op1=-42
1772 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1773 */
1774 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1775
1776 IEM_MC_BEGIN(0, 2);
1777 IEM_MC_LOCAL(uint64_t, uSrc);
1778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1779
1780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1782 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1783 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1784
1785 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1786 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1787
1788 IEM_MC_ADVANCE_RIP();
1789 IEM_MC_END();
1790 }
1791 return VINF_SUCCESS;
1792}
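
/* Editor's note: both forms above write only the high quadword of the
   destination: movlhps takes the *low* quadword of the source register,
   movhps takes a 64-bit memory operand; dst[63:0] is left untouched
   (contrast movhlps at 0x0f 0x12, which goes high-to-low).  Sketch:

   @code
       #include <stdint.h>

       typedef struct { uint64_t au64[2]; } X128;   /* stand-in for RTUINT128U */

       static void MovLhPs(X128 *pDst, X128 const *pSrc)
       {
           pDst->au64[1] = pSrc->au64[0];   /* src.lo -> dst.hi; dst.lo kept */
       }
   @endcode
*/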
1793
1794
1795/**
1796 * @opcode 0x16
1797 * @opcodesub !11 mr/reg
1798 * @oppfx 0x66
1799 * @opcpuid sse2
1800 * @opgroup og_sse2_pcksclr_datamove
1801 * @opxcpttype 5
1802 * @optest op1=1 op2=2 -> op1=2
1803 * @optest op1=0 op2=-42 -> op1=-42
1804 */
1805FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1806{
1807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1808 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1809 {
1810 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1811 IEM_MC_BEGIN(0, 2);
1812 IEM_MC_LOCAL(uint64_t, uSrc);
1813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1814
1815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1817 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1818 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1819
1820 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1821 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1822
1823 IEM_MC_ADVANCE_RIP();
1824 IEM_MC_END();
1825 return VINF_SUCCESS;
1826 }
1827
1828 /**
1829 * @opdone
1830 * @opmnemonic ud660f16m3
1831 * @opcode 0x16
1832 * @opcodesub 11 mr/reg
1833 * @oppfx 0x66
1834 * @opunused immediate
1835 * @opcpuid sse
1836 * @optest ->
1837 */
1838 return IEMOP_RAISE_INVALID_OPCODE();
1839}
1840
1841
1842/**
1843 * @opcode 0x16
1844 * @oppfx 0xf3
1845 * @opcpuid sse3
1846 * @opgroup og_sse3_pcksclr_datamove
1847 * @opxcpttype 4
1848 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1849 * op1=0x00000002000000020000000100000001
1850 */
1851FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1852{
1853 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1856 {
1857 /*
1858 * Register, register.
1859 */
1860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1861 IEM_MC_BEGIN(2, 0);
1862 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1863 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1864
1865 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1866 IEM_MC_PREPARE_SSE_USAGE();
1867
1868 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1869 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1870 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1871
1872 IEM_MC_ADVANCE_RIP();
1873 IEM_MC_END();
1874 }
1875 else
1876 {
1877 /*
1878 * Register, memory.
1879 */
1880 IEM_MC_BEGIN(2, 2);
1881 IEM_MC_LOCAL(RTUINT128U, uSrc);
1882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1883 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1884 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1885
1886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1888 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1889 IEM_MC_PREPARE_SSE_USAGE();
1890
1891 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1892 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1893 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1894
1895 IEM_MC_ADVANCE_RIP();
1896 IEM_MC_END();
1897 }
1898 return VINF_SUCCESS;
1899}
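
/*
 * Lane sketch for movshdup (consistent with the @optest above): the odd
 * (high) dword of each qword lane is duplicated into the even slot.
 *      src = [d3 d2 d1 d0]  ->  dst = [d3 d3 d1 d1]
 */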
1900
1901/**
1902 * @opdone
1903 * @opmnemonic udf20f16
1904 * @opcode 0x16
1905 * @oppfx 0xf2
1906 * @opunused intel-modrm
1907 * @opcpuid sse
1908 * @optest ->
1909 * @opdone
1910 */
1911
1912
1913/**
1914 * @opcode 0x17
1915 * @opcodesub !11 mr/reg
1916 * @oppfx none
1917 * @opcpuid sse
1918 * @opgroup og_sse_simdfp_datamove
1919 * @opxcpttype 5
1920 * @optest op1=1 op2=2 -> op1=2
1921 * @optest op1=0 op2=-42 -> op1=-42
1922 */
1923FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1924{
1925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1926 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1927 {
1928 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1929
1930 IEM_MC_BEGIN(0, 2);
1931 IEM_MC_LOCAL(uint64_t, uSrc);
1932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1933
1934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1936 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1937 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1938
1939 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1940 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1941
1942 IEM_MC_ADVANCE_RIP();
1943 IEM_MC_END();
1944 return VINF_SUCCESS;
1945 }
1946
1947 /**
1948 * @opdone
1949 * @opmnemonic ud0f17m3
1950 * @opcode 0x17
1951 * @opcodesub 11 mr/reg
1952 * @oppfx none
1953 * @opunused immediate
1954 * @opcpuid sse
1955 * @optest ->
1956 */
1957 return IEMOP_RAISE_INVALID_OPCODE();
1958}
1959
1960
1961/**
1962 * @opcode 0x17
1963 * @opcodesub !11 mr/reg
1964 * @oppfx 0x66
1965 * @opcpuid sse2
1966 * @opgroup og_sse2_pcksclr_datamove
1967 * @opxcpttype 5
1968 * @optest op1=1 op2=2 -> op1=2
1969 * @optest op1=0 op2=-42 -> op1=-42
1970 */
1971FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1972{
1973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1974 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1975 {
1976 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1977
1978 IEM_MC_BEGIN(0, 2);
1979 IEM_MC_LOCAL(uint64_t, uSrc);
1980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1981
1982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1984        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1985 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1986
1987 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1988 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1989
1990 IEM_MC_ADVANCE_RIP();
1991 IEM_MC_END();
1992 return VINF_SUCCESS;
1993 }
1994
1995 /**
1996 * @opdone
1997 * @opmnemonic ud660f17m3
1998 * @opcode 0x17
1999 * @opcodesub 11 mr/reg
2000 * @oppfx 0x66
2001 * @opunused immediate
2002 * @opcpuid sse
2003 * @optest ->
2004 */
2005 return IEMOP_RAISE_INVALID_OPCODE();
2006}
2007
2008
2009/**
2010 * @opdone
2011 * @opmnemonic udf30f17
2012 * @opcode 0x17
2013 * @oppfx 0xf3
2014 * @opunused intel-modrm
2015 * @opcpuid sse
2016 * @optest ->
2017 * @opdone
2018 */
2019
2020/**
2021 * @opmnemonic udf20f17
2022 * @opcode 0x17
2023 * @oppfx 0xf2
2024 * @opunused intel-modrm
2025 * @opcpuid sse
2026 * @optest ->
2027 * @opdone
2028 */
2029
2030
2031/** Opcode 0x0f 0x18. */
2032FNIEMOP_DEF(iemOp_prefetch_Grp16)
2033{
2034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2035 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2036 {
2037 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2038 {
2039 case 4: /* Aliased to /0 for the time being according to AMD. */
2040 case 5: /* Aliased to /0 for the time being according to AMD. */
2041 case 6: /* Aliased to /0 for the time being according to AMD. */
2042 case 7: /* Aliased to /0 for the time being according to AMD. */
2043 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2044 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2045 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2046 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2048 }
2049
2050 IEM_MC_BEGIN(0, 1);
2051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2054 /* Currently a NOP. */
2055 NOREF(GCPtrEffSrc);
2056 IEM_MC_ADVANCE_RIP();
2057 IEM_MC_END();
2058 return VINF_SUCCESS;
2059 }
2060
2061 return IEMOP_RAISE_INVALID_OPCODE();
2062}
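
/*
 * Encoding sketch (illustrative, 32-bit addressing): the ModR/M reg field
 * selects the hint decoded above.
 *      0f 18 06            prefetchnta byte [esi]      ; /0
 *      0f 18 0e            prefetcht0  byte [esi]      ; /1
 *      0f 18 16            prefetcht1  byte [esi]      ; /2
 *      0f 18 1e            prefetcht2  byte [esi]      ; /3
 */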
2063
2064
2065/** Opcode 0x0f 0x19..0x1f. */
2066FNIEMOP_DEF(iemOp_nop_Ev)
2067{
2068 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2070 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2071 {
2072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2073 IEM_MC_BEGIN(0, 0);
2074 IEM_MC_ADVANCE_RIP();
2075 IEM_MC_END();
2076 }
2077 else
2078 {
2079 IEM_MC_BEGIN(0, 1);
2080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2083 /* Currently a NOP. */
2084 NOREF(GCPtrEffSrc);
2085 IEM_MC_ADVANCE_RIP();
2086 IEM_MC_END();
2087 }
2088 return VINF_SUCCESS;
2089}
2090
2091
2092/** Opcode 0x0f 0x20. */
2093FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2094{
2095    /* mod is ignored, as are operand size overrides. */
2096 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2097 IEMOP_HLP_MIN_386();
2098 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2099 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2100 else
2101 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2102
2103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2104 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2105 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2106 {
2107 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2108 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2109 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2110 iCrReg |= 8;
2111 }
2112 switch (iCrReg)
2113 {
2114 case 0: case 2: case 3: case 4: case 8:
2115 break;
2116 default:
2117 return IEMOP_RAISE_INVALID_OPCODE();
2118 }
2119 IEMOP_HLP_DONE_DECODING();
2120
2121 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2122}
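
/*
 * Sketch of the LOCK/CR8 decoding above (illustrative bytes; assumes a CPU
 * reporting the fMovCr8In32Bit feature, e.g. AMD64 in legacy mode):
 *      0f 20 c0            mov eax, cr0
 *      f0 0f 20 c0         lock mov eax, cr0       ; decoded as mov eax, cr8
 * Without the feature the LOCK form raises #UD, as per the check above.
 */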
2123
2124
2125/** Opcode 0x0f 0x21. */
2126FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2127{
2128 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2129 IEMOP_HLP_MIN_386();
2130 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2132 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2133 return IEMOP_RAISE_INVALID_OPCODE();
2134 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2135 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2136 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2137}
2138
2139
2140/** Opcode 0x0f 0x22. */
2141FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2142{
2143    /* mod is ignored, as are operand size overrides. */
2144 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2145 IEMOP_HLP_MIN_386();
2146 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2147 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2148 else
2149 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2150
2151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2152 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2153 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2154 {
2155 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2156 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2157 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2158 iCrReg |= 8;
2159 }
2160 switch (iCrReg)
2161 {
2162 case 0: case 2: case 3: case 4: case 8:
2163 break;
2164 default:
2165 return IEMOP_RAISE_INVALID_OPCODE();
2166 }
2167 IEMOP_HLP_DONE_DECODING();
2168
2169 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2170}
2171
2172
2173/** Opcode 0x0f 0x23. */
2174FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2175{
2176 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2177 IEMOP_HLP_MIN_386();
2178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2180 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2181 return IEMOP_RAISE_INVALID_OPCODE();
2182 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2183 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2184 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2185}
2186
2187
2188/** Opcode 0x0f 0x24. */
2189FNIEMOP_DEF(iemOp_mov_Rd_Td)
2190{
2191 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2192 /** @todo works on 386 and 486. */
2193 /* The RM byte is not considered, see testcase. */
2194 return IEMOP_RAISE_INVALID_OPCODE();
2195}
2196
2197
2198/** Opcode 0x0f 0x26. */
2199FNIEMOP_DEF(iemOp_mov_Td_Rd)
2200{
2201 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2202 /** @todo works on 386 and 486. */
2203 /* The RM byte is not considered, see testcase. */
2204 return IEMOP_RAISE_INVALID_OPCODE();
2205}
2206
2207
2208/**
2209 * @opcode 0x28
2210 * @oppfx none
2211 * @opcpuid sse
2212 * @opgroup og_sse_simdfp_datamove
2213 * @opxcpttype 1
2214 * @optest op1=1 op2=2 -> op1=2
2215 * @optest op1=0 op2=-42 -> op1=-42
2216 */
2217FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2218{
2219 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2222 {
2223 /*
2224 * Register, register.
2225 */
2226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2227 IEM_MC_BEGIN(0, 0);
2228 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2229 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2230 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2231 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2232 IEM_MC_ADVANCE_RIP();
2233 IEM_MC_END();
2234 }
2235 else
2236 {
2237 /*
2238 * Register, memory.
2239 */
2240 IEM_MC_BEGIN(0, 2);
2241 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2243
2244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2246 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2247 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2248
2249 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2250 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2251
2252 IEM_MC_ADVANCE_RIP();
2253 IEM_MC_END();
2254 }
2255 return VINF_SUCCESS;
2256}
2257
2258/**
2259 * @opcode 0x28
2260 * @oppfx 66
2261 * @opcpuid sse2
2262 * @opgroup og_sse2_pcksclr_datamove
2263 * @opxcpttype 1
2264 * @optest op1=1 op2=2 -> op1=2
2265 * @optest op1=0 op2=-42 -> op1=-42
2266 */
2267FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2268{
2269 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2271 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2272 {
2273 /*
2274 * Register, register.
2275 */
2276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2277 IEM_MC_BEGIN(0, 0);
2278 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2279 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2280 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2281 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2282 IEM_MC_ADVANCE_RIP();
2283 IEM_MC_END();
2284 }
2285 else
2286 {
2287 /*
2288 * Register, memory.
2289 */
2290 IEM_MC_BEGIN(0, 2);
2291 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2293
2294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2296 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2297 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2298
2299 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2300 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2301
2302 IEM_MC_ADVANCE_RIP();
2303 IEM_MC_END();
2304 }
2305 return VINF_SUCCESS;
2306}
2307
2308/* Opcode 0xf3 0x0f 0x28 - invalid */
2309/* Opcode 0xf2 0x0f 0x28 - invalid */
2310
2311/**
2312 * @opcode 0x29
2313 * @oppfx none
2314 * @opcpuid sse
2315 * @opgroup og_sse_simdfp_datamove
2316 * @opxcpttype 1
2317 * @optest op1=1 op2=2 -> op1=2
2318 * @optest op1=0 op2=-42 -> op1=-42
2319 */
2320FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2321{
2322 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2324 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2325 {
2326 /*
2327 * Register, register.
2328 */
2329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2330 IEM_MC_BEGIN(0, 0);
2331 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2332 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2333 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2334 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2335 IEM_MC_ADVANCE_RIP();
2336 IEM_MC_END();
2337 }
2338 else
2339 {
2340 /*
2341 * Memory, register.
2342 */
2343 IEM_MC_BEGIN(0, 2);
2344 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2346
2347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2350 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2351
2352 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2353 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2354
2355 IEM_MC_ADVANCE_RIP();
2356 IEM_MC_END();
2357 }
2358 return VINF_SUCCESS;
2359}
2360
2361/**
2362 * @opcode 0x29
2363 * @oppfx 66
2364 * @opcpuid sse2
2365 * @opgroup og_sse2_pcksclr_datamove
2366 * @opxcpttype 1
2367 * @optest op1=1 op2=2 -> op1=2
2368 * @optest op1=0 op2=-42 -> op1=-42
2369 */
2370FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2371{
2372 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2374 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2375 {
2376 /*
2377 * Register, register.
2378 */
2379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2380 IEM_MC_BEGIN(0, 0);
2381 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2382 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2383 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2384 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2385 IEM_MC_ADVANCE_RIP();
2386 IEM_MC_END();
2387 }
2388 else
2389 {
2390 /*
2391 * Memory, register.
2392 */
2393 IEM_MC_BEGIN(0, 2);
2394 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2396
2397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2399 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2401
2402 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2403 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2404
2405 IEM_MC_ADVANCE_RIP();
2406 IEM_MC_END();
2407 }
2408 return VINF_SUCCESS;
2409}
2410
2411/* Opcode 0xf3 0x0f 0x29 - invalid */
2412/* Opcode 0xf2 0x0f 0x29 - invalid */
2413
2414
2415/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2416FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2417/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2418FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2419/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2420FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2421/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2422FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2423
2424
2425/**
2426 * @opcode 0x2b
2427 * @opcodesub !11 mr/reg
2428 * @oppfx none
2429 * @opcpuid sse
2430 * @opgroup og_sse1_cachect
2431 * @opxcpttype 1
2432 * @optest op1=1 op2=2 -> op1=2
2433 * @optest op1=0 op2=-42 -> op1=-42
2434 */
2435FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2436{
2437 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2439 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2440 {
2441 /*
2442 * memory, register.
2443 */
2444 IEM_MC_BEGIN(0, 2);
2445 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2447
2448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2450 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2451 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2452
2453 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2454 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2455
2456 IEM_MC_ADVANCE_RIP();
2457 IEM_MC_END();
2458 }
2459 /* The register, register encoding is invalid. */
2460 else
2461 return IEMOP_RAISE_INVALID_OPCODE();
2462 return VINF_SUCCESS;
2463}
2464
2465/**
2466 * @opcode 0x2b
2467 * @opcodesub !11 mr/reg
2468 * @oppfx 0x66
2469 * @opcpuid sse2
2470 * @opgroup og_sse2_cachect
2471 * @opxcpttype 1
2472 * @optest op1=1 op2=2 -> op1=2
2473 * @optest op1=0 op2=-42 -> op1=-42
2474 */
2475FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2476{
2477 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2479 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2480 {
2481 /*
2482 * memory, register.
2483 */
2484 IEM_MC_BEGIN(0, 2);
2485 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2487
2488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2490 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2491 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2492
2493 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2494 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2495
2496 IEM_MC_ADVANCE_RIP();
2497 IEM_MC_END();
2498 }
2499 /* The register, register encoding is invalid. */
2500 else
2501 return IEMOP_RAISE_INVALID_OPCODE();
2502 return VINF_SUCCESS;
2503}
2504/* Opcode 0xf3 0x0f 0x2b - invalid */
2505/* Opcode 0xf2 0x0f 0x2b - invalid */
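
/*
 * Note on the movntps/movntpd workers above: the non-temporal hint has no
 * architectural effect in this emulation; both decode to a plain 16-byte
 * aligned store, so an unaligned operand still faults via
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE.
 */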
2506
2507
2508/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2509FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2510/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2511FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2512/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2513FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2514/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2515FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2516
2517/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2518FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2519/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2520FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2521/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2522FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2523/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2524FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2525
2526/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2527FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2528/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2529FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2530/* Opcode 0xf3 0x0f 0x2e - invalid */
2531/* Opcode 0xf2 0x0f 0x2e - invalid */
2532
2533/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2534FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2535/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2536FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2537/* Opcode 0xf3 0x0f 0x2f - invalid */
2538/* Opcode 0xf2 0x0f 0x2f - invalid */
2539
2540/** Opcode 0x0f 0x30. */
2541FNIEMOP_DEF(iemOp_wrmsr)
2542{
2543 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2545 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2546}
2547
2548
2549/** Opcode 0x0f 0x31. */
2550FNIEMOP_DEF(iemOp_rdtsc)
2551{
2552 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2554 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2555}
2556
2557
2558/** Opcode 0x0f 0x32. */
2559FNIEMOP_DEF(iemOp_rdmsr)
2560{
2561 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2563 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2564}
2565
2566
2567/** Opcode 0x0f 0x33. */
2568FNIEMOP_DEF(iemOp_rdpmc)
2569{
2570 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2573}
2574
2575
2576/** Opcode 0x0f 0x34. */
2577FNIEMOP_STUB(iemOp_sysenter);
2578/** Opcode 0x0f 0x35. */
2579FNIEMOP_STUB(iemOp_sysexit);
2580/** Opcode 0x0f 0x37. */
2581FNIEMOP_STUB(iemOp_getsec);
2582
2583
2584/** Opcode 0x0f 0x38. */
2585FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2586{
2587#ifdef IEM_WITH_THREE_0F_38
2588 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2589 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2590#else
2591 IEMOP_BITCH_ABOUT_STUB();
2592 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2593#endif
2594}
2595
2596
2597/** Opcode 0x0f 0x3a. */
2598FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2599{
2600#ifdef IEM_WITH_THREE_0F_3A
2601 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2602 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2603#else
2604 IEMOP_BITCH_ABOUT_STUB();
2605 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2606#endif
2607}
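
/*
 * Dispatch sketch for the two escape tables above: four entries per opcode
 * byte, selected by the last SIMD prefix (assuming idxPrefix encodes
 * 0=none, 1=0x66, 2=0xf3, 3=0xf2):
 *      g_apfnThreeByte0f38[(uintptr_t)b * 4 + idxPrefix]
 */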
2608
2609
2610/**
2611 * Implements a conditional move.
2612 *
2613 * Wish there was an obvious way to do this where we could share and reduce
2614 * code bloat.
2615 *
2616 * @param a_Cnd The conditional "microcode" operation.
2617 */
2618#define CMOV_X(a_Cnd) \
2619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2620 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2621 { \
2622 switch (pVCpu->iem.s.enmEffOpSize) \
2623 { \
2624 case IEMMODE_16BIT: \
2625 IEM_MC_BEGIN(0, 1); \
2626 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2627 a_Cnd { \
2628 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2629 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2630 } IEM_MC_ENDIF(); \
2631 IEM_MC_ADVANCE_RIP(); \
2632 IEM_MC_END(); \
2633 return VINF_SUCCESS; \
2634 \
2635 case IEMMODE_32BIT: \
2636 IEM_MC_BEGIN(0, 1); \
2637 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2638 a_Cnd { \
2639 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2640 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2641 } IEM_MC_ELSE() { \
2642 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2643 } IEM_MC_ENDIF(); \
2644 IEM_MC_ADVANCE_RIP(); \
2645 IEM_MC_END(); \
2646 return VINF_SUCCESS; \
2647 \
2648 case IEMMODE_64BIT: \
2649 IEM_MC_BEGIN(0, 1); \
2650 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2651 a_Cnd { \
2652 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2653 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2654 } IEM_MC_ENDIF(); \
2655 IEM_MC_ADVANCE_RIP(); \
2656 IEM_MC_END(); \
2657 return VINF_SUCCESS; \
2658 \
2659 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2660 } \
2661 } \
2662 else \
2663 { \
2664 switch (pVCpu->iem.s.enmEffOpSize) \
2665 { \
2666 case IEMMODE_16BIT: \
2667 IEM_MC_BEGIN(0, 2); \
2668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2669 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2671 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2672 a_Cnd { \
2673 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2674 } IEM_MC_ENDIF(); \
2675 IEM_MC_ADVANCE_RIP(); \
2676 IEM_MC_END(); \
2677 return VINF_SUCCESS; \
2678 \
2679 case IEMMODE_32BIT: \
2680 IEM_MC_BEGIN(0, 2); \
2681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2682 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2684 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2685 a_Cnd { \
2686 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2687 } IEM_MC_ELSE() { \
2688 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2689 } IEM_MC_ENDIF(); \
2690 IEM_MC_ADVANCE_RIP(); \
2691 IEM_MC_END(); \
2692 return VINF_SUCCESS; \
2693 \
2694 case IEMMODE_64BIT: \
2695 IEM_MC_BEGIN(0, 2); \
2696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2697 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2699 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2700 a_Cnd { \
2701 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2702 } IEM_MC_ENDIF(); \
2703 IEM_MC_ADVANCE_RIP(); \
2704 IEM_MC_END(); \
2705 return VINF_SUCCESS; \
2706 \
2707 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2708 } \
2709 } do {} while (0)
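
/*
 * Semantics sketch of a CMOV_X expansion (illustrative C, not microcode):
 *      fetch src;                      // memory is read even if cond is false
 *      if (cond)
 *          dst = src;
 *      else if (opsize == 32)
 *          dst.high32 = 0;             // 32-bit ops always zero the top half
 */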
2710
2711
2712
2713/** Opcode 0x0f 0x40. */
2714FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2715{
2716 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2717 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2718}
2719
2720
2721/** Opcode 0x0f 0x41. */
2722FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2723{
2724 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2725 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2726}
2727
2728
2729/** Opcode 0x0f 0x42. */
2730FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2731{
2732 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2733 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2734}
2735
2736
2737/** Opcode 0x0f 0x43. */
2738FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2739{
2740 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2741 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2742}
2743
2744
2745/** Opcode 0x0f 0x44. */
2746FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2747{
2748 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2749 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2750}
2751
2752
2753/** Opcode 0x0f 0x45. */
2754FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2755{
2756 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2757 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2758}
2759
2760
2761/** Opcode 0x0f 0x46. */
2762FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2763{
2764 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2765 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2766}
2767
2768
2769/** Opcode 0x0f 0x47. */
2770FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2771{
2772 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2773 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2774}
2775
2776
2777/** Opcode 0x0f 0x48. */
2778FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2779{
2780 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2781 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2782}
2783
2784
2785/** Opcode 0x0f 0x49. */
2786FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2787{
2788 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2789 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2790}
2791
2792
2793/** Opcode 0x0f 0x4a. */
2794FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2795{
2796 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2797 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2798}
2799
2800
2801/** Opcode 0x0f 0x4b. */
2802FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2803{
2804 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2805 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2806}
2807
2808
2809/** Opcode 0x0f 0x4c. */
2810FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2811{
2812 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2813 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2814}
2815
2816
2817/** Opcode 0x0f 0x4d. */
2818FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2819{
2820 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2821 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2822}
2823
2824
2825/** Opcode 0x0f 0x4e. */
2826FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2827{
2828 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2829 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2830}
2831
2832
2833/** Opcode 0x0f 0x4f. */
2834FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2835{
2836 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2837 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2838}
2839
2840#undef CMOV_X
2841
2842/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2843FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2844/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2845FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2846/* Opcode 0xf3 0x0f 0x50 - invalid */
2847/* Opcode 0xf2 0x0f 0x50 - invalid */
2848
2849/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2850FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2851/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2852FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2853/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2854FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2855/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2856FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2857
2858/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2859FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2860/* Opcode 0x66 0x0f 0x52 - invalid */
2861/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2862FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2863/* Opcode 0xf2 0x0f 0x52 - invalid */
2864
2865/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2866FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2867/* Opcode 0x66 0x0f 0x53 - invalid */
2868/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2869FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2870/* Opcode 0xf2 0x0f 0x53 - invalid */
2871
2872/** Opcode 0x0f 0x54 - andps Vps, Wps */
2873FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2874/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2875FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2876/* Opcode 0xf3 0x0f 0x54 - invalid */
2877/* Opcode 0xf2 0x0f 0x54 - invalid */
2878
2879/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2880FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2881/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2882FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2883/* Opcode 0xf3 0x0f 0x55 - invalid */
2884/* Opcode 0xf2 0x0f 0x55 - invalid */
2885
2886/** Opcode 0x0f 0x56 - orps Vps, Wps */
2887FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2888/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2889FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2890/* Opcode 0xf3 0x0f 0x56 - invalid */
2891/* Opcode 0xf2 0x0f 0x56 - invalid */
2892
2893/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2894FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2895/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2896FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2897/* Opcode 0xf3 0x0f 0x57 - invalid */
2898/* Opcode 0xf2 0x0f 0x57 - invalid */
2899
2900/** Opcode 0x0f 0x58 - addps Vps, Wps */
2901FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2902/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2903FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2904/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2905FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2906/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2907FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2908
2909/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2910FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2911/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2912FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2913/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2914FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2915/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2916FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2917
2918/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2919FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2920/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2921FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2922/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2923FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2924/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2925FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2926
2927/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2928FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2929/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2930FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2931/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2932FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2933/* Opcode 0xf2 0x0f 0x5b - invalid */
2934
2935/** Opcode 0x0f 0x5c - subps Vps, Wps */
2936FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2937/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2938FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2939/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2940FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2941/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2942FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2943
2944/** Opcode 0x0f 0x5d - minps Vps, Wps */
2945FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2946/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2947FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2948/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2949FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2950/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2951FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2952
2953/** Opcode 0x0f 0x5e - divps Vps, Wps */
2954FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2955/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2956FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2957/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2958FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2959/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2960FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2961
2962/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2963FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2964/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2965FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2966/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2967FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2968/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2969FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2970
2971/**
2972 * Common worker for SSE2 instructions on the forms:
2973 *      pxxxx    xmm1, xmm2/mem128
2974 *
2975 * The 2nd operand is the low half of a register, which in the memory case
2976 * means a 128-bit aligned 64-bit read; only the low quadword of the source
2977 * is used.
2978 *
2979 * Exceptions type 4.
2980 */
2981FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2982{
2983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2984 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2985 {
2986 /*
2987 * Register, register.
2988 */
2989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2990 IEM_MC_BEGIN(2, 0);
2991 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2992 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2993 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2994 IEM_MC_PREPARE_SSE_USAGE();
2995 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2996 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2997 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2998 IEM_MC_ADVANCE_RIP();
2999 IEM_MC_END();
3000 }
3001 else
3002 {
3003 /*
3004 * Register, memory.
3005 */
3006 IEM_MC_BEGIN(2, 2);
3007 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3008 IEM_MC_LOCAL(uint64_t, uSrc);
3009 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3011
3012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3014 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3015 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3016
3017 IEM_MC_PREPARE_SSE_USAGE();
3018 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3019 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3020
3021 IEM_MC_ADVANCE_RIP();
3022 IEM_MC_END();
3023 }
3024 return VINF_SUCCESS;
3025}
3026
3027
3028/**
3029 * Common worker for MMX instructions on the forms:
3030 *      pxxxx    mm1, mm2/mem32
3031 *
3032 * The 2nd operand is the low half of a register, which in the memory case
3033 * means a 32-bit memory access.  Instructions without a 64-bit (MMX)
3034 * implementation are rejected via the pfnU64 check below.
3035 *
3036 * Exceptions type 4.
3037 */
3038FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3039{
3040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3041 if (!pImpl->pfnU64)
3042 return IEMOP_RAISE_INVALID_OPCODE();
3043 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3044 {
3045 /*
3046 * Register, register.
3047 */
3048 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3049 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3051 IEM_MC_BEGIN(2, 0);
3052 IEM_MC_ARG(uint64_t *, pDst, 0);
3053 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3054 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3055 IEM_MC_PREPARE_FPU_USAGE();
3056 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3057 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3058 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3059 IEM_MC_ADVANCE_RIP();
3060 IEM_MC_END();
3061 }
3062 else
3063 {
3064 /*
3065 * Register, memory.
3066 */
3067 IEM_MC_BEGIN(2, 2);
3068 IEM_MC_ARG(uint64_t *, pDst, 0);
3069 IEM_MC_LOCAL(uint32_t, uSrc);
3070 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3072
3073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3075 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3076 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3077
3078 IEM_MC_PREPARE_FPU_USAGE();
3079 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3080 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3081
3082 IEM_MC_ADVANCE_RIP();
3083 IEM_MC_END();
3084 }
3085 return VINF_SUCCESS;
3086}
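
/*
 * Worked example of the low-to-full interleave these workers implement
 * (punpcklbw mm1, mm2; illustrative byte values):
 *      mm1 = 0x8877665544332211        ; low half 44 33 22 11
 *      mm2 = 0xffeeddccbbaa9988        ; low half bb aa 99 88
 *      =>  mm1 = 0xbb44aa3399228811    ; dst and src bytes interleaved
 */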
3087
3088
3089/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3090FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3091{
3092 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3093 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3094}
3095
3096/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3097FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3098{
3099    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3100 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3101}
3102
3103/* Opcode 0xf3 0x0f 0x60 - invalid */
3104
3105
3106/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3107FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3108{
3109    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3110 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3111}
3112
3113/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3114FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3115{
3116    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3117 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3118}
3119
3120/* Opcode 0xf3 0x0f 0x61 - invalid */
3121
3122
3123/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3124FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3125{
3126 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3127 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3128}
3129
3130/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3131FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3132{
3133 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3134 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3135}
3136
3137/* Opcode 0xf3 0x0f 0x62 - invalid */
3138
3139
3140
3141/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3142FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3143/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3144FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3145/* Opcode 0xf3 0x0f 0x63 - invalid */
3146
3147/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3148FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3149/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3150FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3151/* Opcode 0xf3 0x0f 0x64 - invalid */
3152
3153/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3154FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3155/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3156FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3157/* Opcode 0xf3 0x0f 0x65 - invalid */
3158
3159/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3160FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3161/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3162FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3163/* Opcode 0xf3 0x0f 0x66 - invalid */
3164
3165/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3166FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3167/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3168FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3169/* Opcode 0xf3 0x0f 0x67 - invalid */
3170
3171
3172/**
3173 * Common worker for MMX instructions on the form:
3174 * pxxxx mm1, mm2/mem64
3175 *
3176 * The 2nd operand is the high (second) half of a register, which in the
3177 * memory case means a 64-bit memory access; instructions without a 64-bit
3178 * (MMX) implementation are rejected via the pfnU64 assertion below.
3179 *
3180 * Exceptions type 4.
3181 */
3182FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3183{
3184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3185 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3187 {
3188 /*
3189 * Register, register.
3190 */
3191 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3192 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3194 IEM_MC_BEGIN(2, 0);
3195 IEM_MC_ARG(uint64_t *, pDst, 0);
3196 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3197 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3198 IEM_MC_PREPARE_FPU_USAGE();
3199 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3200 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3201 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3202 IEM_MC_ADVANCE_RIP();
3203 IEM_MC_END();
3204 }
3205 else
3206 {
3207 /*
3208 * Register, memory.
3209 */
3210 IEM_MC_BEGIN(2, 2);
3211 IEM_MC_ARG(uint64_t *, pDst, 0);
3212 IEM_MC_LOCAL(uint64_t, uSrc);
3213 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3215
3216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3218 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3219 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3220
3221 IEM_MC_PREPARE_FPU_USAGE();
3222 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3223 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3224
3225 IEM_MC_ADVANCE_RIP();
3226 IEM_MC_END();
3227 }
3228 return VINF_SUCCESS;
3229}
3230
3231
3232/**
3233 * Common worker for SSE2 instructions on the form:
3234 * pxxxx xmm1, xmm2/mem128
3235 *
3236 * The 2nd operand is the high (second) half of a register, which in the
3237 * memory case means a 128-bit aligned access, of which the implementation
3238 * may read the full 128 bits or only the upper 64 bits.
3239 *
3240 * Exceptions type 4.
3241 */
3242FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3243{
3244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3245 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3246 {
3247 /*
3248 * Register, register.
3249 */
3250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3251 IEM_MC_BEGIN(2, 0);
3252 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3253 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3254 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3255 IEM_MC_PREPARE_SSE_USAGE();
3256 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3257 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3258 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3259 IEM_MC_ADVANCE_RIP();
3260 IEM_MC_END();
3261 }
3262 else
3263 {
3264 /*
3265 * Register, memory.
3266 */
3267 IEM_MC_BEGIN(2, 2);
3268 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3269 IEM_MC_LOCAL(RTUINT128U, uSrc);
3270 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3272
3273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3275 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3276        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3277
3278 IEM_MC_PREPARE_SSE_USAGE();
3279 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3280 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3281
3282 IEM_MC_ADVANCE_RIP();
3283 IEM_MC_END();
3284 }
3285 return VINF_SUCCESS;
3286}
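
/*
 * Worked example of the high-to-full interleave (punpckhbw mm1, mm2;
 * illustrative byte values):
 *      mm1 = 0x8877665544332211        ; high half 88 77 66 55
 *      mm2 = 0xffeeddccbbaa9988        ; high half ff ee dd cc
 *      =>  mm1 = 0xff88ee77dd66cc55
 */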
3287
3288
3289/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3290FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3291{
3292 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3293 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3294}
3295
3296/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3297FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3298{
3299    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3300 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3301}
3302/* Opcode 0xf3 0x0f 0x68 - invalid */
3303
3304
3305/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3306FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3307{
3308 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3309 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3310}
3311
3312/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3313FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3314{
3315 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3316 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3317
3318}
3319/* Opcode 0xf3 0x0f 0x69 - invalid */
3320
3321
3322/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3323FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3324{
3325 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3326 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3327}
3328
3329/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3330FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3331{
3332 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3333 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3334}
3335/* Opcode 0xf3 0x0f 0x6a - invalid */
3336
3337
3338/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3339FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3340/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3341FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3342/* Opcode 0xf3 0x0f 0x6b - invalid */
3343
3344
3345/* Opcode 0x0f 0x6c - invalid */
3346
3347/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3348FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3349{
3350 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3351 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3352}
3353
3354/* Opcode 0xf3 0x0f 0x6c - invalid */
3355/* Opcode 0xf2 0x0f 0x6c - invalid */
3356
3357
3358/* Opcode 0x0f 0x6d - invalid */
3359
3360/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3361FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3362{
3363 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3364 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3365}
3366
3367/* Opcode 0xf3 0x0f 0x6d - invalid */
3368
3369
3370FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3371{
3372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3373 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3374 {
3375 /**
3376 * @opcode 0x6e
3377 * @opcodesub rex.w=1
3378 * @oppfx none
3379 * @opcpuid mmx
3380 * @opgroup og_mmx_datamove
3381 * @opxcpttype 5
3382 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3383 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3384 */
3385 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3386 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3387 {
3388 /* MMX, greg64 */
3389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3390 IEM_MC_BEGIN(0, 1);
3391 IEM_MC_LOCAL(uint64_t, u64Tmp);
3392
3393 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3394 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3395
3396 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3397 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3398 IEM_MC_FPU_TO_MMX_MODE();
3399
3400 IEM_MC_ADVANCE_RIP();
3401 IEM_MC_END();
3402 }
3403 else
3404 {
3405 /* MMX, [mem64] */
3406 IEM_MC_BEGIN(0, 2);
3407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3408 IEM_MC_LOCAL(uint64_t, u64Tmp);
3409
3410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3412 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3413 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3414
3415 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3416 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3417 IEM_MC_FPU_TO_MMX_MODE();
3418
3419 IEM_MC_ADVANCE_RIP();
3420 IEM_MC_END();
3421 }
3422 }
3423 else
3424 {
3425 /**
3426 * @opdone
3427 * @opcode 0x6e
3428 * @opcodesub rex.w=0
3429 * @oppfx none
3430 * @opcpuid mmx
3431 * @opgroup og_mmx_datamove
3432 * @opxcpttype 5
3433 * @opfunction iemOp_movd_q_Pd_Ey
3434 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3435 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3436 */
3437 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3438 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3439 {
3440 /* MMX, greg */
3441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3442 IEM_MC_BEGIN(0, 1);
3443 IEM_MC_LOCAL(uint64_t, u64Tmp);
3444
3445 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3446 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3447
3448 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3449 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3450 IEM_MC_FPU_TO_MMX_MODE();
3451
3452 IEM_MC_ADVANCE_RIP();
3453 IEM_MC_END();
3454 }
3455 else
3456 {
3457 /* MMX, [mem] */
3458 IEM_MC_BEGIN(0, 2);
3459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3460 IEM_MC_LOCAL(uint32_t, u32Tmp);
3461
3462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3464 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3465 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3466
3467 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3468 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3469 IEM_MC_FPU_TO_MMX_MODE();
3470
3471 IEM_MC_ADVANCE_RIP();
3472 IEM_MC_END();
3473 }
3474 }
3475 return VINF_SUCCESS;
3476}
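
/*
 * Encoding sketch for the REX.W split above (illustrative):
 *      0f 6e c8            movd mm1, eax       ; eax zero-extended to 64 bits
 *      48 0f 6e c8         movq mm1, rax       ; REX.W=1, 64-bit mode only
 */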
3477
3478FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3479{
3480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3481 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3482 {
3483 /**
3484 * @opcode 0x6e
3485 * @opcodesub rex.w=1
3486 * @oppfx 0x66
3487 * @opcpuid sse2
3488 * @opgroup og_sse2_simdint_datamove
3489 * @opxcpttype 5
3490 * @optest 64-bit / op1=1 op2=2 -> op1=2
3491 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3492 */
3493 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3494 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3495 {
3496 /* XMM, greg64 */
3497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3498 IEM_MC_BEGIN(0, 1);
3499 IEM_MC_LOCAL(uint64_t, u64Tmp);
3500
3501 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3502 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3503
3504 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3505 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3506
3507 IEM_MC_ADVANCE_RIP();
3508 IEM_MC_END();
3509 }
3510 else
3511 {
3512 /* XMM, [mem64] */
3513 IEM_MC_BEGIN(0, 2);
3514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3515 IEM_MC_LOCAL(uint64_t, u64Tmp);
3516
3517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3519 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3520 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3521
3522 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3523 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3524
3525 IEM_MC_ADVANCE_RIP();
3526 IEM_MC_END();
3527 }
3528 }
3529 else
3530 {
3531 /**
3532 * @opdone
3533 * @opcode 0x6e
3534 * @opcodesub rex.w=0
3535 * @oppfx 0x66
3536 * @opcpuid sse2
3537 * @opgroup og_sse2_simdint_datamove
3538 * @opxcpttype 5
3539 * @opfunction iemOp_movd_q_Vy_Ey
3540 * @optest op1=1 op2=2 -> op1=2
3541 * @optest op1=0 op2=-42 -> op1=-42
3542 */
3543 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3544 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3545 {
3546 /* XMM, greg32 */
3547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3548 IEM_MC_BEGIN(0, 1);
3549 IEM_MC_LOCAL(uint32_t, u32Tmp);
3550
3551 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3552 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3553
3554 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3555 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3556
3557 IEM_MC_ADVANCE_RIP();
3558 IEM_MC_END();
3559 }
3560 else
3561 {
3562 /* XMM, [mem32] */
3563 IEM_MC_BEGIN(0, 2);
3564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3565 IEM_MC_LOCAL(uint32_t, u32Tmp);
3566
3567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3569 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3570 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3571
3572 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3573 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3574
3575 IEM_MC_ADVANCE_RIP();
3576 IEM_MC_END();
3577 }
3578 }
3579 return VINF_SUCCESS;
3580}
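
/*
 * Encoding sketch for the 0x66-prefixed variant above (illustrative):
 *      66 0f 6e c8         movd xmm1, eax      ; zero-extended to 128 bits
 *      66 48 0f 6e c8      movq xmm1, rax      ; REX.W=1, 64-bit mode only
 */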
3581
3582/* Opcode 0xf3 0x0f 0x6e - invalid */
3583
3584
3585/**
3586 * @opcode 0x6f
3587 * @oppfx none
3588 * @opcpuid mmx
3589 * @opgroup og_mmx_datamove
3590 * @opxcpttype 5
3591 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3592 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3593 */
3594FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3595{
3596    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3599 {
3600 /*
3601 * Register, register.
3602 */
3603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3604 IEM_MC_BEGIN(0, 1);
3605 IEM_MC_LOCAL(uint64_t, u64Tmp);
3606
3607 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3608 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3609
3610 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3611 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3612 IEM_MC_FPU_TO_MMX_MODE();
3613
3614 IEM_MC_ADVANCE_RIP();
3615 IEM_MC_END();
3616 }
3617 else
3618 {
3619 /*
3620 * Register, memory.
3621 */
3622 IEM_MC_BEGIN(0, 2);
3623 IEM_MC_LOCAL(uint64_t, u64Tmp);
3624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3625
3626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3628 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3629 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3630
3631 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3632 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3633 IEM_MC_FPU_TO_MMX_MODE();
3634
3635 IEM_MC_ADVANCE_RIP();
3636 IEM_MC_END();
3637 }
3638 return VINF_SUCCESS;
3639}
3640
3641/**
3642 * @opcode 0x6f
3643 * @oppfx 0x66
3644 * @opcpuid sse2
3645 * @opgroup og_sse2_simdint_datamove
3646 * @opxcpttype 1
3647 * @optest op1=1 op2=2 -> op1=2
3648 * @optest op1=0 op2=-42 -> op1=-42
3649 */
3650FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3651{
3652 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3654 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3655 {
3656 /*
3657 * Register, register.
3658 */
3659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3660 IEM_MC_BEGIN(0, 0);
3661
3662 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3663 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3664
3665 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3666 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3667 IEM_MC_ADVANCE_RIP();
3668 IEM_MC_END();
3669 }
3670 else
3671 {
3672 /*
3673 * Register, memory.
3674 */
3675 IEM_MC_BEGIN(0, 2);
3676 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3678
3679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3681 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3682 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3683
3684 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3685 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3686
3687 IEM_MC_ADVANCE_RIP();
3688 IEM_MC_END();
3689 }
3690 return VINF_SUCCESS;
3691}
3692
3693/**
3694 * @opcode 0x6f
3695 * @oppfx 0xf3
3696 * @opcpuid sse2
3697 * @opgroup og_sse2_simdint_datamove
3698 * @opxcpttype 4UA
3699 * @optest op1=1 op2=2 -> op1=2
3700 * @optest op1=0 op2=-42 -> op1=-42
3701 */
3702FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3703{
3704 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3706 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3707 {
3708 /*
3709 * Register, register.
3710 */
3711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3712 IEM_MC_BEGIN(0, 0);
3713 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3714 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3715 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3716 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3717 IEM_MC_ADVANCE_RIP();
3718 IEM_MC_END();
3719 }
3720 else
3721 {
3722 /*
3723 * Register, memory.
3724 */
3725 IEM_MC_BEGIN(0, 2);
3726 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3728
3729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3731 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3732 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
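        /* Note: unaligned fetch here, in contrast to the aligned one movdqa
           uses above - otherwise the two handlers are identical. */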
3733 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3734 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3735
3736 IEM_MC_ADVANCE_RIP();
3737 IEM_MC_END();
3738 }
3739 return VINF_SUCCESS;
3740}
3741
3742
3743/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3744FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3745{
3746 IEMOP_MNEMONIC(pshufw_Pq_Qq_Ib, "pshufw Pq,Qq,Ib");
3747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3748 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3749 {
3750 /*
3751 * Register, register.
3752 */
3753 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755
3756 IEM_MC_BEGIN(3, 0);
3757 IEM_MC_ARG(uint64_t *, pDst, 0);
3758 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3759 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3760 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3761 IEM_MC_PREPARE_FPU_USAGE();
3762 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3763 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3764 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3765 IEM_MC_ADVANCE_RIP();
3766 IEM_MC_END();
3767 }
3768 else
3769 {
3770 /*
3771 * Register, memory.
3772 */
3773 IEM_MC_BEGIN(3, 2);
3774 IEM_MC_ARG(uint64_t *, pDst, 0);
3775 IEM_MC_LOCAL(uint64_t, uSrc);
3776 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3778
3779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
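        /* The immediate byte trails the ModR/M byte and any displacement, so
           it can only be fetched once the effective address has been
           calculated (ditto for the pshufd/pshufhw/pshuflw variants below). */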
3780 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3781 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3783 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3784
3785 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3786 IEM_MC_PREPARE_FPU_USAGE();
3787 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3788 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3789
3790 IEM_MC_ADVANCE_RIP();
3791 IEM_MC_END();
3792 }
3793 return VINF_SUCCESS;
3794}
3795
3796/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3797FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3798{
3799 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3801 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3802 {
3803 /*
3804 * Register, register.
3805 */
3806 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3808
3809 IEM_MC_BEGIN(3, 0);
3810 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3811 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3812 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3813 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3814 IEM_MC_PREPARE_SSE_USAGE();
3815 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3816 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3817 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3818 IEM_MC_ADVANCE_RIP();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 /*
3824 * Register, memory.
3825 */
3826 IEM_MC_BEGIN(3, 2);
3827 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3828 IEM_MC_LOCAL(RTUINT128U, uSrc);
3829 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3831
3832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3833 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3834 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3836 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3837
3838 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3839 IEM_MC_PREPARE_SSE_USAGE();
3840 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3841 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3842
3843 IEM_MC_ADVANCE_RIP();
3844 IEM_MC_END();
3845 }
3846 return VINF_SUCCESS;
3847}
3848
3849/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3850FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3851{
3852 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3854 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3855 {
3856 /*
3857 * Register, register.
3858 */
3859 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861
3862 IEM_MC_BEGIN(3, 0);
3863 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3864 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3865 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3866 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3867 IEM_MC_PREPARE_SSE_USAGE();
3868 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3869 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3870 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3871 IEM_MC_ADVANCE_RIP();
3872 IEM_MC_END();
3873 }
3874 else
3875 {
3876 /*
3877 * Register, memory.
3878 */
3879 IEM_MC_BEGIN(3, 2);
3880 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3881 IEM_MC_LOCAL(RTUINT128U, uSrc);
3882 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3884
3885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3886 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3887 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3889 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3890
3891 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3892 IEM_MC_PREPARE_SSE_USAGE();
3893 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3894 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3895
3896 IEM_MC_ADVANCE_RIP();
3897 IEM_MC_END();
3898 }
3899 return VINF_SUCCESS;
3900}
3901
3902/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3903FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3904{
3905 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3907 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3908 {
3909 /*
3910 * Register, register.
3911 */
3912 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3914
3915 IEM_MC_BEGIN(3, 0);
3916 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3917 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3918 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3919 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3920 IEM_MC_PREPARE_SSE_USAGE();
3921 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3922 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3923 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3924 IEM_MC_ADVANCE_RIP();
3925 IEM_MC_END();
3926 }
3927 else
3928 {
3929 /*
3930 * Register, memory.
3931 */
3932 IEM_MC_BEGIN(3, 2);
3933 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3934 IEM_MC_LOCAL(RTUINT128U, uSrc);
3935 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3937
3938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3939 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3940 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3942 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3943
3944 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3945 IEM_MC_PREPARE_SSE_USAGE();
3946 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3947 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3948
3949 IEM_MC_ADVANCE_RIP();
3950 IEM_MC_END();
3951 }
3952 return VINF_SUCCESS;
3953}
3954
3955
3956/** Opcode 0x0f 0x71 11/2. */
3957FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3958
3959/** Opcode 0x66 0x0f 0x71 11/2. */
3960FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3961
3962/** Opcode 0x0f 0x71 11/4. */
3963FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3964
3965/** Opcode 0x66 0x0f 0x71 11/4. */
3966FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3967
3968/** Opcode 0x0f 0x71 11/6. */
3969FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3970
3971/** Opcode 0x66 0x0f 0x71 11/6. */
3972FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3973
3974
3975/**
3976 * Group 12 jump table for register variant.
3977 */
3978IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3979{
3980 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3981 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3982 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3983 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3984 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3985 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3986 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3987 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3988};
3989AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
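/*
 * The group 12/13/14 register tables are indexed by (reg * 4) + idxPrefix,
 * where idxPrefix is 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for 0xf2
 * (see the dispatchers below).  The memory encodings of these groups are all
 * invalid.
 */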
3990
3991
3992/** Opcode 0x0f 0x71. */
3993FNIEMOP_DEF(iemOp_Grp12)
3994{
3995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3996 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3997 /* register, register */
3998 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3999 + pVCpu->iem.s.idxPrefix], bRm);
4000 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4001}
4002
4003
4004/** Opcode 0x0f 0x72 11/2. */
4005FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4006
4007/** Opcode 0x66 0x0f 0x72 11/2. */
4008FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4009
4010/** Opcode 0x0f 0x72 11/4. */
4011FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4012
4013/** Opcode 0x66 0x0f 0x72 11/4. */
4014FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4015
4016/** Opcode 0x0f 0x72 11/6. */
4017FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4018
4019/** Opcode 0x66 0x0f 0x72 11/6. */
4020FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4021
4022
4023/**
4024 * Group 13 jump table for register variant.
4025 */
4026IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4027{
4028 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4029 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4030 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4031 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4032 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4033 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4034 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4035 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4036};
4037AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4038
4039/** Opcode 0x0f 0x72. */
4040FNIEMOP_DEF(iemOp_Grp13)
4041{
4042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4043 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4044 /* register, register */
4045 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4046 + pVCpu->iem.s.idxPrefix], bRm);
4047 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4048}
4049
4050
4051/** Opcode 0x0f 0x73 11/2. */
4052FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4053
4054/** Opcode 0x66 0x0f 0x73 11/2. */
4055FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4056
4057/** Opcode 0x66 0x0f 0x73 11/3. */
4058FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4059
4060/** Opcode 0x0f 0x73 11/6. */
4061FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4062
4063/** Opcode 0x66 0x0f 0x73 11/6. */
4064FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4065
4066/** Opcode 0x66 0x0f 0x73 11/7. */
4067FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4068
4069/**
4070 * Group 14 jump table for register variant.
4071 */
4072IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4073{
4074 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4075 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4076 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4077 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4078 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4079 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4080 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4081 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4082};
4083AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
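/* Note: unlike groups 12 and 13, group 14 also has /3 (psrldq) and /7 (pslldq)
   entries; these byte-wise shifts exist only with the 0x66 prefix (SSE2). */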
4084
4085
4086/** Opcode 0x0f 0x73. */
4087FNIEMOP_DEF(iemOp_Grp14)
4088{
4089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4090 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4091 /* register, register */
4092 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4093 + pVCpu->iem.s.idxPrefix], bRm);
4094 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4095}
4096
4097
4098/**
4099 * Common worker for MMX instructions of the form:
4100 * pxxx mm1, mm2/mem64
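 *
 * @remarks The caller passes the A-implementation via an IEMOPMEDIAF2 table,
 *          e.g. &g_iemAImpl_pcmpeqb; see the pcmpeqb/pcmpeqw/pcmpeqd wrappers
 *          further down for typical usage.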
4101 */
4102FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4103{
4104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4105 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4106 {
4107 /*
4108 * Register, register.
4109 */
4110 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4111 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4113 IEM_MC_BEGIN(2, 0);
4114 IEM_MC_ARG(uint64_t *, pDst, 0);
4115 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4116 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4117 IEM_MC_PREPARE_FPU_USAGE();
4118 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4119 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4120 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4121 IEM_MC_ADVANCE_RIP();
4122 IEM_MC_END();
4123 }
4124 else
4125 {
4126 /*
4127 * Register, memory.
4128 */
4129 IEM_MC_BEGIN(2, 2);
4130 IEM_MC_ARG(uint64_t *, pDst, 0);
4131 IEM_MC_LOCAL(uint64_t, uSrc);
4132 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4134
4135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4137 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4138 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4139
4140 IEM_MC_PREPARE_FPU_USAGE();
4141 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4142 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4143
4144 IEM_MC_ADVANCE_RIP();
4145 IEM_MC_END();
4146 }
4147 return VINF_SUCCESS;
4148}
4149
4150
4151/**
4152 * Common worker for SSE2 instructions of the form:
4153 * pxxx xmm1, xmm2/mem128
4154 *
4155 * Proper alignment of the 128-bit operand is enforced.
4156 * Exceptions type 4. SSE2 cpuid checks.
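 *
 * @remarks The 16-byte alignment requirement is enforced by
 *          IEM_MC_FETCH_MEM_U128_ALIGN_SSE, which raises \#GP(0) for
 *          misaligned memory operands.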
4157 */
4158FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4159{
4160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4162 {
4163 /*
4164 * Register, register.
4165 */
4166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4167 IEM_MC_BEGIN(2, 0);
4168 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4169 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4171 IEM_MC_PREPARE_SSE_USAGE();
4172 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4173 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4174 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4175 IEM_MC_ADVANCE_RIP();
4176 IEM_MC_END();
4177 }
4178 else
4179 {
4180 /*
4181 * Register, memory.
4182 */
4183 IEM_MC_BEGIN(2, 2);
4184 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4185 IEM_MC_LOCAL(RTUINT128U, uSrc);
4186 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4188
4189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4191 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4192 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4193
4194 IEM_MC_PREPARE_SSE_USAGE();
4195 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4196 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4197
4198 IEM_MC_ADVANCE_RIP();
4199 IEM_MC_END();
4200 }
4201 return VINF_SUCCESS;
4202}
4203
4204
4205/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4206FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4207{
4208 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4209 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4210}
4211
4212/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4213FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4214{
4215 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4216 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4217}
4218
4219/* Opcode 0xf3 0x0f 0x74 - invalid */
4220/* Opcode 0xf2 0x0f 0x74 - invalid */
4221
4222
4223/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4224FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4225{
4226 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4227 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4228}
4229
4230/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4231FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4232{
4233 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4234 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4235}
4236
4237/* Opcode 0xf3 0x0f 0x75 - invalid */
4238/* Opcode 0xf2 0x0f 0x75 - invalid */
4239
4240
4241/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4242FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4243{
4244 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4245 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4246}
4247
4248/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4249FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4250{
4251 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4252 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4253}
4254
4255/* Opcode 0xf3 0x0f 0x76 - invalid */
4256/* Opcode 0xf2 0x0f 0x76 - invalid */
4257
4258
4259/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4260FNIEMOP_DEF(iemOp_emms)
4261{
4262 IEMOP_MNEMONIC(emms, "emms");
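    /* EMMS leaves MMX mode by tagging all eight x87 registers as empty (that
       is what IEM_MC_FPU_FROM_MMX_MODE below does), allowing regular FPU code
       to follow MMX code. */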
4263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4264
4265 IEM_MC_BEGIN(0,0);
4266 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4267 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4268 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4269 IEM_MC_FPU_FROM_MMX_MODE();
4270 IEM_MC_ADVANCE_RIP();
4271 IEM_MC_END();
4272 return VINF_SUCCESS;
4273}
4274
4275/* Opcode 0x66 0x0f 0x77 - invalid */
4276/* Opcode 0xf3 0x0f 0x77 - invalid */
4277/* Opcode 0xf2 0x0f 0x77 - invalid */
4278
4279/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4280FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4281/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4282FNIEMOP_STUB(iemOp_AmdGrp17);
4283/* Opcode 0xf3 0x0f 0x78 - invalid */
4284/* Opcode 0xf2 0x0f 0x78 - invalid */
4285
4286/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4287FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4288/* Opcode 0x66 0x0f 0x79 - invalid */
4289/* Opcode 0xf3 0x0f 0x79 - invalid */
4290/* Opcode 0xf2 0x0f 0x79 - invalid */
4291
4292/* Opcode 0x0f 0x7a - invalid */
4293/* Opcode 0x66 0x0f 0x7a - invalid */
4294/* Opcode 0xf3 0x0f 0x7a - invalid */
4295/* Opcode 0xf2 0x0f 0x7a - invalid */
4296
4297/* Opcode 0x0f 0x7b - invalid */
4298/* Opcode 0x66 0x0f 0x7b - invalid */
4299/* Opcode 0xf3 0x0f 0x7b - invalid */
4300/* Opcode 0xf2 0x0f 0x7b - invalid */
4301
4302/* Opcode 0x0f 0x7c - invalid */
4303/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4304FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4305/* Opcode 0xf3 0x0f 0x7c - invalid */
4306/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4307FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4308
4309/* Opcode 0x0f 0x7d - invalid */
4310/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4311FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4312/* Opcode 0xf3 0x0f 0x7d - invalid */
4313/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4314FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4315
4316
4317/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4318FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4319{
4320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4321 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4322 {
4323 /**
4324 * @opcode 0x7e
4325 * @opcodesub rex.w=1
4326 * @oppfx none
4327 * @opcpuid mmx
4328 * @opgroup og_mmx_datamove
4329 * @opxcpttype 5
4330 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4331 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4332 */
4333 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4334 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4335 {
4336 /* greg64, MMX */
4337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4338 IEM_MC_BEGIN(0, 1);
4339 IEM_MC_LOCAL(uint64_t, u64Tmp);
4340
4341 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4342 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4343
4344 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4345 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4346 IEM_MC_FPU_TO_MMX_MODE();
4347
4348 IEM_MC_ADVANCE_RIP();
4349 IEM_MC_END();
4350 }
4351 else
4352 {
4353 /* [mem64], MMX */
4354 IEM_MC_BEGIN(0, 2);
4355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4356 IEM_MC_LOCAL(uint64_t, u64Tmp);
4357
4358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4360 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4361 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4362
4363 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4364 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4365 IEM_MC_FPU_TO_MMX_MODE();
4366
4367 IEM_MC_ADVANCE_RIP();
4368 IEM_MC_END();
4369 }
4370 }
4371 else
4372 {
4373 /**
4374 * @opdone
4375 * @opcode 0x7e
4376 * @opcodesub rex.w=0
4377 * @oppfx none
4378 * @opcpuid mmx
4379 * @opgroup og_mmx_datamove
4380 * @opxcpttype 5
4381 * @opfunction iemOp_movd_q_Ey_Pd
4382 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4383 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4384 */
4385 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4386 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4387 {
4388 /* greg32, MMX */
4389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4390 IEM_MC_BEGIN(0, 1);
4391 IEM_MC_LOCAL(uint32_t, u32Tmp);
4392
4393 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4394 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4395
4396 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4397 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4398 IEM_MC_FPU_TO_MMX_MODE();
4399
4400 IEM_MC_ADVANCE_RIP();
4401 IEM_MC_END();
4402 }
4403 else
4404 {
4405 /* [mem32], MMX */
4406 IEM_MC_BEGIN(0, 2);
4407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4408 IEM_MC_LOCAL(uint32_t, u32Tmp);
4409
4410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4412 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4413 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4414
4415 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4416 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4417 IEM_MC_FPU_TO_MMX_MODE();
4418
4419 IEM_MC_ADVANCE_RIP();
4420 IEM_MC_END();
4421 }
4422 }
4423 return VINF_SUCCESS;
4425}
4426
4427
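/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */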
4428FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4429{
4430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4431 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4432 {
4433 /**
4434 * @opcode 0x7e
4435 * @opcodesub rex.w=1
4436 * @oppfx 0x66
4437 * @opcpuid sse2
4438 * @opgroup og_sse2_simdint_datamove
4439 * @opxcpttype 5
4440 * @optest 64-bit / op1=1 op2=2 -> op1=2
4441 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4442 */
4443 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4444 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4445 {
4446 /* greg64, XMM */
4447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4448 IEM_MC_BEGIN(0, 1);
4449 IEM_MC_LOCAL(uint64_t, u64Tmp);
4450
4451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4453
4454 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4455 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4456
4457 IEM_MC_ADVANCE_RIP();
4458 IEM_MC_END();
4459 }
4460 else
4461 {
4462 /* [mem64], XMM */
4463 IEM_MC_BEGIN(0, 2);
4464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4465 IEM_MC_LOCAL(uint64_t, u64Tmp);
4466
4467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4469 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4470 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4471
4472 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4473 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4474
4475 IEM_MC_ADVANCE_RIP();
4476 IEM_MC_END();
4477 }
4478 }
4479 else
4480 {
4481 /**
4482 * @opdone
4483 * @opcode 0x7e
4484 * @opcodesub rex.w=0
4485 * @oppfx 0x66
4486 * @opcpuid sse2
4487 * @opgroup og_sse2_simdint_datamove
4488 * @opxcpttype 5
4489 * @opfunction iemOp_movd_q_Ey_Vy
4490 * @optest op1=1 op2=2 -> op1=2
4491 * @optest op1=0 op2=-42 -> op1=-42
4492 */
4493 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4494 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4495 {
4496 /* greg32, XMM */
4497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4498 IEM_MC_BEGIN(0, 1);
4499 IEM_MC_LOCAL(uint32_t, u32Tmp);
4500
4501 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4502 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4503
4504 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4505 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4506
4507 IEM_MC_ADVANCE_RIP();
4508 IEM_MC_END();
4509 }
4510 else
4511 {
4512 /* [mem32], XMM */
4513 IEM_MC_BEGIN(0, 2);
4514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4515 IEM_MC_LOCAL(uint32_t, u32Tmp);
4516
4517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4519 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4520 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4521
4522 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4523 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4524
4525 IEM_MC_ADVANCE_RIP();
4526 IEM_MC_END();
4527 }
4528 }
4529 return VINF_SUCCESS;
4531}
4532
4533/**
4534 * @opcode 0x7e
4535 * @oppfx 0xf3
4536 * @opcpuid sse2
4537 * @opgroup og_sse2_pcksclr_datamove
4538 * @opxcpttype none
4539 * @optest op1=1 op2=2 -> op1=2
4540 * @optest op1=0 op2=-42 -> op1=-42
4541 */
4542FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4543{
4544 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
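    /* Both encodings zero the destination register's bits 127:64, hence the
       VqZx_WO operand annotation above. */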
4545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4547 {
4548 /*
4549 * Register, register.
4550 */
4551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4552 IEM_MC_BEGIN(0, 2);
4553 IEM_MC_LOCAL(uint64_t, uSrc);
4554
4555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4556 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4557
4558 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4559 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4560
4561 IEM_MC_ADVANCE_RIP();
4562 IEM_MC_END();
4563 }
4564 else
4565 {
4566 /*
4567 * Memory, register.
4568 */
4569 IEM_MC_BEGIN(0, 2);
4570 IEM_MC_LOCAL(uint64_t, uSrc);
4571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4572
4573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4575 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4576 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4577
4578 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4579 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4580
4581 IEM_MC_ADVANCE_RIP();
4582 IEM_MC_END();
4583 }
4584 return VINF_SUCCESS;
4585}
4586
4587/* Opcode 0xf2 0x0f 0x7e - invalid */
4588
4589
4590/** Opcode 0x0f 0x7f - movq Qq, Pq */
4591FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4592{
4593 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4595 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4596 {
4597 /*
4598 * Register, register.
4599 */
4600 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4601 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4603 IEM_MC_BEGIN(0, 1);
4604 IEM_MC_LOCAL(uint64_t, u64Tmp);
4605 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4606 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4607 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4608 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4609 IEM_MC_ADVANCE_RIP();
4610 IEM_MC_END();
4611 }
4612 else
4613 {
4614 /*
4615 * Register, memory.
4616 */
4617 IEM_MC_BEGIN(0, 2);
4618 IEM_MC_LOCAL(uint64_t, u64Tmp);
4619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4620
4621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4623 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4624 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4625
4626 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4627 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4628
4629 IEM_MC_ADVANCE_RIP();
4630 IEM_MC_END();
4631 }
4632 return VINF_SUCCESS;
4633}
4634
4635/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4636FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4637{
4638 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4641 {
4642 /*
4643 * Register, register.
4644 */
4645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4646 IEM_MC_BEGIN(0, 0);
4647 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4648 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4649 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4650 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4651 IEM_MC_ADVANCE_RIP();
4652 IEM_MC_END();
4653 }
4654 else
4655 {
4656 /*
4657 * Register, memory.
4658 */
4659 IEM_MC_BEGIN(0, 2);
4660 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4662
4663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4665 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4666 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4667
4668 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4669 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4670
4671 IEM_MC_ADVANCE_RIP();
4672 IEM_MC_END();
4673 }
4674 return VINF_SUCCESS;
4675}
4676
4677/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4678FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4679{
4680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4681 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4683 {
4684 /*
4685 * Register, register.
4686 */
4687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4688 IEM_MC_BEGIN(0, 0);
4689 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4690 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4691 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4692 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4693 IEM_MC_ADVANCE_RIP();
4694 IEM_MC_END();
4695 }
4696 else
4697 {
4698 /*
4699 * Register, memory.
4700 */
4701 IEM_MC_BEGIN(0, 2);
4702 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4704
4705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4707 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4708 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4709
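        /* Note: unaligned store, in contrast to the aligned one movdqa uses
           above - otherwise the two handlers are identical. */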
4710 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4711 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4712
4713 IEM_MC_ADVANCE_RIP();
4714 IEM_MC_END();
4715 }
4716 return VINF_SUCCESS;
4717}
4718
4719/* Opcode 0xf2 0x0f 0x7f - invalid */
4720
4721
4722
4723/** Opcode 0x0f 0x80. */
4724FNIEMOP_DEF(iemOp_jo_Jv)
4725{
4726 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4727 IEMOP_HLP_MIN_386();
4728 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
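    /* All the Jcc Jv handlers (0x0f 0x80 thru 0x8f) follow this pattern: a
       signed word displacement with 16-bit operand size, otherwise a signed
       dword; the taken path does the relative jump, the not-taken path just
       advances RIP. */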
4729 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4730 {
4731 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4733
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4736 IEM_MC_REL_JMP_S16(i16Imm);
4737 } IEM_MC_ELSE() {
4738 IEM_MC_ADVANCE_RIP();
4739 } IEM_MC_ENDIF();
4740 IEM_MC_END();
4741 }
4742 else
4743 {
4744 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4746
4747 IEM_MC_BEGIN(0, 0);
4748 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4749 IEM_MC_REL_JMP_S32(i32Imm);
4750 } IEM_MC_ELSE() {
4751 IEM_MC_ADVANCE_RIP();
4752 } IEM_MC_ENDIF();
4753 IEM_MC_END();
4754 }
4755 return VINF_SUCCESS;
4756}
4757
4758
4759/** Opcode 0x0f 0x81. */
4760FNIEMOP_DEF(iemOp_jno_Jv)
4761{
4762 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4763 IEMOP_HLP_MIN_386();
4764 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4765 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4766 {
4767 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4769
4770 IEM_MC_BEGIN(0, 0);
4771 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4772 IEM_MC_ADVANCE_RIP();
4773 } IEM_MC_ELSE() {
4774 IEM_MC_REL_JMP_S16(i16Imm);
4775 } IEM_MC_ENDIF();
4776 IEM_MC_END();
4777 }
4778 else
4779 {
4780 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782
4783 IEM_MC_BEGIN(0, 0);
4784 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4785 IEM_MC_ADVANCE_RIP();
4786 } IEM_MC_ELSE() {
4787 IEM_MC_REL_JMP_S32(i32Imm);
4788 } IEM_MC_ENDIF();
4789 IEM_MC_END();
4790 }
4791 return VINF_SUCCESS;
4792}
4793
4794
4795/** Opcode 0x0f 0x82. */
4796FNIEMOP_DEF(iemOp_jc_Jv)
4797{
4798 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4799 IEMOP_HLP_MIN_386();
4800 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4801 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4802 {
4803 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4805
4806 IEM_MC_BEGIN(0, 0);
4807 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4808 IEM_MC_REL_JMP_S16(i16Imm);
4809 } IEM_MC_ELSE() {
4810 IEM_MC_ADVANCE_RIP();
4811 } IEM_MC_ENDIF();
4812 IEM_MC_END();
4813 }
4814 else
4815 {
4816 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4818
4819 IEM_MC_BEGIN(0, 0);
4820 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4821 IEM_MC_REL_JMP_S32(i32Imm);
4822 } IEM_MC_ELSE() {
4823 IEM_MC_ADVANCE_RIP();
4824 } IEM_MC_ENDIF();
4825 IEM_MC_END();
4826 }
4827 return VINF_SUCCESS;
4828}
4829
4830
4831/** Opcode 0x0f 0x83. */
4832FNIEMOP_DEF(iemOp_jnc_Jv)
4833{
4834 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4835 IEMOP_HLP_MIN_386();
4836 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4837 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4838 {
4839 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4841
4842 IEM_MC_BEGIN(0, 0);
4843 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4844 IEM_MC_ADVANCE_RIP();
4845 } IEM_MC_ELSE() {
4846 IEM_MC_REL_JMP_S16(i16Imm);
4847 } IEM_MC_ENDIF();
4848 IEM_MC_END();
4849 }
4850 else
4851 {
4852 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4854
4855 IEM_MC_BEGIN(0, 0);
4856 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4857 IEM_MC_ADVANCE_RIP();
4858 } IEM_MC_ELSE() {
4859 IEM_MC_REL_JMP_S32(i32Imm);
4860 } IEM_MC_ENDIF();
4861 IEM_MC_END();
4862 }
4863 return VINF_SUCCESS;
4864}
4865
4866
4867/** Opcode 0x0f 0x84. */
4868FNIEMOP_DEF(iemOp_je_Jv)
4869{
4870 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4871 IEMOP_HLP_MIN_386();
4872 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4873 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4874 {
4875 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4877
4878 IEM_MC_BEGIN(0, 0);
4879 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4880 IEM_MC_REL_JMP_S16(i16Imm);
4881 } IEM_MC_ELSE() {
4882 IEM_MC_ADVANCE_RIP();
4883 } IEM_MC_ENDIF();
4884 IEM_MC_END();
4885 }
4886 else
4887 {
4888 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4890
4891 IEM_MC_BEGIN(0, 0);
4892 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4893 IEM_MC_REL_JMP_S32(i32Imm);
4894 } IEM_MC_ELSE() {
4895 IEM_MC_ADVANCE_RIP();
4896 } IEM_MC_ENDIF();
4897 IEM_MC_END();
4898 }
4899 return VINF_SUCCESS;
4900}
4901
4902
4903/** Opcode 0x0f 0x85. */
4904FNIEMOP_DEF(iemOp_jne_Jv)
4905{
4906 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4907 IEMOP_HLP_MIN_386();
4908 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4909 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4910 {
4911 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4913
4914 IEM_MC_BEGIN(0, 0);
4915 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4916 IEM_MC_ADVANCE_RIP();
4917 } IEM_MC_ELSE() {
4918 IEM_MC_REL_JMP_S16(i16Imm);
4919 } IEM_MC_ENDIF();
4920 IEM_MC_END();
4921 }
4922 else
4923 {
4924 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4926
4927 IEM_MC_BEGIN(0, 0);
4928 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4929 IEM_MC_ADVANCE_RIP();
4930 } IEM_MC_ELSE() {
4931 IEM_MC_REL_JMP_S32(i32Imm);
4932 } IEM_MC_ENDIF();
4933 IEM_MC_END();
4934 }
4935 return VINF_SUCCESS;
4936}
4937
4938
4939/** Opcode 0x0f 0x86. */
4940FNIEMOP_DEF(iemOp_jbe_Jv)
4941{
4942 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4943 IEMOP_HLP_MIN_386();
4944 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4945 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4946 {
4947 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4949
4950 IEM_MC_BEGIN(0, 0);
4951 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4952 IEM_MC_REL_JMP_S16(i16Imm);
4953 } IEM_MC_ELSE() {
4954 IEM_MC_ADVANCE_RIP();
4955 } IEM_MC_ENDIF();
4956 IEM_MC_END();
4957 }
4958 else
4959 {
4960 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962
4963 IEM_MC_BEGIN(0, 0);
4964 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4965 IEM_MC_REL_JMP_S32(i32Imm);
4966 } IEM_MC_ELSE() {
4967 IEM_MC_ADVANCE_RIP();
4968 } IEM_MC_ENDIF();
4969 IEM_MC_END();
4970 }
4971 return VINF_SUCCESS;
4972}
4973
4974
4975/** Opcode 0x0f 0x87. */
4976FNIEMOP_DEF(iemOp_jnbe_Jv)
4977{
4978 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4979 IEMOP_HLP_MIN_386();
4980 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4981 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4982 {
4983 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4985
4986 IEM_MC_BEGIN(0, 0);
4987 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4988 IEM_MC_ADVANCE_RIP();
4989 } IEM_MC_ELSE() {
4990 IEM_MC_REL_JMP_S16(i16Imm);
4991 } IEM_MC_ENDIF();
4992 IEM_MC_END();
4993 }
4994 else
4995 {
4996 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4998
4999 IEM_MC_BEGIN(0, 0);
5000 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5001 IEM_MC_ADVANCE_RIP();
5002 } IEM_MC_ELSE() {
5003 IEM_MC_REL_JMP_S32(i32Imm);
5004 } IEM_MC_ENDIF();
5005 IEM_MC_END();
5006 }
5007 return VINF_SUCCESS;
5008}
5009
5010
5011/** Opcode 0x0f 0x88. */
5012FNIEMOP_DEF(iemOp_js_Jv)
5013{
5014 IEMOP_MNEMONIC(js_Jv, "js Jv");
5015 IEMOP_HLP_MIN_386();
5016 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5017 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5018 {
5019 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5021
5022 IEM_MC_BEGIN(0, 0);
5023 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5024 IEM_MC_REL_JMP_S16(i16Imm);
5025 } IEM_MC_ELSE() {
5026 IEM_MC_ADVANCE_RIP();
5027 } IEM_MC_ENDIF();
5028 IEM_MC_END();
5029 }
5030 else
5031 {
5032 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5034
5035 IEM_MC_BEGIN(0, 0);
5036 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5037 IEM_MC_REL_JMP_S32(i32Imm);
5038 } IEM_MC_ELSE() {
5039 IEM_MC_ADVANCE_RIP();
5040 } IEM_MC_ENDIF();
5041 IEM_MC_END();
5042 }
5043 return VINF_SUCCESS;
5044}
5045
5046
5047/** Opcode 0x0f 0x89. */
5048FNIEMOP_DEF(iemOp_jns_Jv)
5049{
5050 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5051 IEMOP_HLP_MIN_386();
5052 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5053 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5054 {
5055 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5057
5058 IEM_MC_BEGIN(0, 0);
5059 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5060 IEM_MC_ADVANCE_RIP();
5061 } IEM_MC_ELSE() {
5062 IEM_MC_REL_JMP_S16(i16Imm);
5063 } IEM_MC_ENDIF();
5064 IEM_MC_END();
5065 }
5066 else
5067 {
5068 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5070
5071 IEM_MC_BEGIN(0, 0);
5072 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5073 IEM_MC_ADVANCE_RIP();
5074 } IEM_MC_ELSE() {
5075 IEM_MC_REL_JMP_S32(i32Imm);
5076 } IEM_MC_ENDIF();
5077 IEM_MC_END();
5078 }
5079 return VINF_SUCCESS;
5080}
5081
5082
5083/** Opcode 0x0f 0x8a. */
5084FNIEMOP_DEF(iemOp_jp_Jv)
5085{
5086 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5087 IEMOP_HLP_MIN_386();
5088 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5089 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5090 {
5091 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5093
5094 IEM_MC_BEGIN(0, 0);
5095 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5096 IEM_MC_REL_JMP_S16(i16Imm);
5097 } IEM_MC_ELSE() {
5098 IEM_MC_ADVANCE_RIP();
5099 } IEM_MC_ENDIF();
5100 IEM_MC_END();
5101 }
5102 else
5103 {
5104 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5106
5107 IEM_MC_BEGIN(0, 0);
5108 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5109 IEM_MC_REL_JMP_S32(i32Imm);
5110 } IEM_MC_ELSE() {
5111 IEM_MC_ADVANCE_RIP();
5112 } IEM_MC_ENDIF();
5113 IEM_MC_END();
5114 }
5115 return VINF_SUCCESS;
5116}
5117
5118
5119/** Opcode 0x0f 0x8b. */
5120FNIEMOP_DEF(iemOp_jnp_Jv)
5121{
5122 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5123 IEMOP_HLP_MIN_386();
5124 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5125 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5126 {
5127 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5129
5130 IEM_MC_BEGIN(0, 0);
5131 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5132 IEM_MC_ADVANCE_RIP();
5133 } IEM_MC_ELSE() {
5134 IEM_MC_REL_JMP_S16(i16Imm);
5135 } IEM_MC_ENDIF();
5136 IEM_MC_END();
5137 }
5138 else
5139 {
5140 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5142
5143 IEM_MC_BEGIN(0, 0);
5144 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5145 IEM_MC_ADVANCE_RIP();
5146 } IEM_MC_ELSE() {
5147 IEM_MC_REL_JMP_S32(i32Imm);
5148 } IEM_MC_ENDIF();
5149 IEM_MC_END();
5150 }
5151 return VINF_SUCCESS;
5152}
5153
5154
5155/** Opcode 0x0f 0x8c. */
5156FNIEMOP_DEF(iemOp_jl_Jv)
5157{
5158 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5159 IEMOP_HLP_MIN_386();
5160 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5161 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5162 {
5163 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165
5166 IEM_MC_BEGIN(0, 0);
5167 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5168 IEM_MC_REL_JMP_S16(i16Imm);
5169 } IEM_MC_ELSE() {
5170 IEM_MC_ADVANCE_RIP();
5171 } IEM_MC_ENDIF();
5172 IEM_MC_END();
5173 }
5174 else
5175 {
5176 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5178
5179 IEM_MC_BEGIN(0, 0);
5180 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5181 IEM_MC_REL_JMP_S32(i32Imm);
5182 } IEM_MC_ELSE() {
5183 IEM_MC_ADVANCE_RIP();
5184 } IEM_MC_ENDIF();
5185 IEM_MC_END();
5186 }
5187 return VINF_SUCCESS;
5188}
5189
5190
5191/** Opcode 0x0f 0x8d. */
5192FNIEMOP_DEF(iemOp_jnl_Jv)
5193{
5194 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5195 IEMOP_HLP_MIN_386();
5196 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5197 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5198 {
5199 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5201
5202 IEM_MC_BEGIN(0, 0);
5203 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5204 IEM_MC_ADVANCE_RIP();
5205 } IEM_MC_ELSE() {
5206 IEM_MC_REL_JMP_S16(i16Imm);
5207 } IEM_MC_ENDIF();
5208 IEM_MC_END();
5209 }
5210 else
5211 {
5212 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5214
5215 IEM_MC_BEGIN(0, 0);
5216 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5217 IEM_MC_ADVANCE_RIP();
5218 } IEM_MC_ELSE() {
5219 IEM_MC_REL_JMP_S32(i32Imm);
5220 } IEM_MC_ENDIF();
5221 IEM_MC_END();
5222 }
5223 return VINF_SUCCESS;
5224}
5225
5226
5227/** Opcode 0x0f 0x8e. */
5228FNIEMOP_DEF(iemOp_jle_Jv)
5229{
5230 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5231 IEMOP_HLP_MIN_386();
5232 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5233 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5234 {
5235 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5237
5238 IEM_MC_BEGIN(0, 0);
5239 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5240 IEM_MC_REL_JMP_S16(i16Imm);
5241 } IEM_MC_ELSE() {
5242 IEM_MC_ADVANCE_RIP();
5243 } IEM_MC_ENDIF();
5244 IEM_MC_END();
5245 }
5246 else
5247 {
5248 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5250
5251 IEM_MC_BEGIN(0, 0);
5252 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5253 IEM_MC_REL_JMP_S32(i32Imm);
5254 } IEM_MC_ELSE() {
5255 IEM_MC_ADVANCE_RIP();
5256 } IEM_MC_ENDIF();
5257 IEM_MC_END();
5258 }
5259 return VINF_SUCCESS;
5260}
5261
5262
5263/** Opcode 0x0f 0x8f. */
5264FNIEMOP_DEF(iemOp_jnle_Jv)
5265{
5266 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5267 IEMOP_HLP_MIN_386();
5268 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5269 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5270 {
5271 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5273
5274 IEM_MC_BEGIN(0, 0);
5275 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5276 IEM_MC_ADVANCE_RIP();
5277 } IEM_MC_ELSE() {
5278 IEM_MC_REL_JMP_S16(i16Imm);
5279 } IEM_MC_ENDIF();
5280 IEM_MC_END();
5281 }
5282 else
5283 {
5284 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5286
5287 IEM_MC_BEGIN(0, 0);
5288 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5289 IEM_MC_ADVANCE_RIP();
5290 } IEM_MC_ELSE() {
5291 IEM_MC_REL_JMP_S32(i32Imm);
5292 } IEM_MC_ENDIF();
5293 IEM_MC_END();
5294 }
5295 return VINF_SUCCESS;
5296}
5297
5298
5299/** Opcode 0x0f 0x90. */
5300FNIEMOP_DEF(iemOp_seto_Eb)
5301{
5302 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5303 IEMOP_HLP_MIN_386();
5304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5305
5306 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5307 * any way. AMD says it's "unused", whatever that means. We're
5308 * ignoring it for now. */
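    /* All the SETcc handlers (0x0f 0x90 thru 0x9f) share this skeleton and
       differ only in the EFLAGS condition tested; a single byte of 1 or 0 is
       written to the register or memory target. */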
5309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5310 {
5311 /* register target */
5312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5313 IEM_MC_BEGIN(0, 0);
5314 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5315 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5316 } IEM_MC_ELSE() {
5317 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5318 } IEM_MC_ENDIF();
5319 IEM_MC_ADVANCE_RIP();
5320 IEM_MC_END();
5321 }
5322 else
5323 {
5324 /* memory target */
5325 IEM_MC_BEGIN(0, 1);
5326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5329 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5330 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5331 } IEM_MC_ELSE() {
5332 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5333 } IEM_MC_ENDIF();
5334 IEM_MC_ADVANCE_RIP();
5335 IEM_MC_END();
5336 }
5337 return VINF_SUCCESS;
5338}
5339
5340
5341/** Opcode 0x0f 0x91. */
5342FNIEMOP_DEF(iemOp_setno_Eb)
5343{
5344 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5345 IEMOP_HLP_MIN_386();
5346 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5347
5348 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5349 * any way. AMD says it's "unused", whatever that means. We're
5350 * ignoring it for now. */
5351 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5352 {
5353 /* register target */
5354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5355 IEM_MC_BEGIN(0, 0);
5356 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5357 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5358 } IEM_MC_ELSE() {
5359 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5360 } IEM_MC_ENDIF();
5361 IEM_MC_ADVANCE_RIP();
5362 IEM_MC_END();
5363 }
5364 else
5365 {
5366 /* memory target */
5367 IEM_MC_BEGIN(0, 1);
5368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5371 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5372 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5373 } IEM_MC_ELSE() {
5374 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5375 } IEM_MC_ENDIF();
5376 IEM_MC_ADVANCE_RIP();
5377 IEM_MC_END();
5378 }
5379 return VINF_SUCCESS;
5380}
5381
5382
5383/** Opcode 0x0f 0x92. */
5384FNIEMOP_DEF(iemOp_setc_Eb)
5385{
5386 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5387 IEMOP_HLP_MIN_386();
5388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5389
5390 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5391 * any way. AMD says it's "unused", whatever that means. We're
5392 * ignoring it for now. */
5393 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5394 {
5395 /* register target */
5396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5397 IEM_MC_BEGIN(0, 0);
5398 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5399 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5400 } IEM_MC_ELSE() {
5401 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5402 } IEM_MC_ENDIF();
5403 IEM_MC_ADVANCE_RIP();
5404 IEM_MC_END();
5405 }
5406 else
5407 {
5408 /* memory target */
5409 IEM_MC_BEGIN(0, 1);
5410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5413 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5414 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5415 } IEM_MC_ELSE() {
5416 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5417 } IEM_MC_ENDIF();
5418 IEM_MC_ADVANCE_RIP();
5419 IEM_MC_END();
5420 }
5421 return VINF_SUCCESS;
5422}
5423
5424
5425/** Opcode 0x0f 0x93. */
5426FNIEMOP_DEF(iemOp_setnc_Eb)
5427{
5428 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5429 IEMOP_HLP_MIN_386();
5430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5431
5432 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5433 * any way. AMD says it's "unused", whatever that means. We're
5434 * ignoring it for now. */
5435 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5436 {
5437 /* register target */
5438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5439 IEM_MC_BEGIN(0, 0);
5440 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5441 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5442 } IEM_MC_ELSE() {
5443 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5444 } IEM_MC_ENDIF();
5445 IEM_MC_ADVANCE_RIP();
5446 IEM_MC_END();
5447 }
5448 else
5449 {
5450 /* memory target */
5451 IEM_MC_BEGIN(0, 1);
5452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5455 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5456 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5457 } IEM_MC_ELSE() {
5458 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5459 } IEM_MC_ENDIF();
5460 IEM_MC_ADVANCE_RIP();
5461 IEM_MC_END();
5462 }
5463 return VINF_SUCCESS;
5464}
5465
5466
5467/** Opcode 0x0f 0x94. */
5468FNIEMOP_DEF(iemOp_sete_Eb)
5469{
5470 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5471 IEMOP_HLP_MIN_386();
5472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5473
5474 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5475 * any way. AMD says it's "unused", whatever that means. We're
5476 * ignoring it for now. */
5477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5478 {
5479 /* register target */
5480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5481 IEM_MC_BEGIN(0, 0);
5482 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5483 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5484 } IEM_MC_ELSE() {
5485 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5486 } IEM_MC_ENDIF();
5487 IEM_MC_ADVANCE_RIP();
5488 IEM_MC_END();
5489 }
5490 else
5491 {
5492 /* memory target */
5493 IEM_MC_BEGIN(0, 1);
5494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5497 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5498 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5499 } IEM_MC_ELSE() {
5500 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5501 } IEM_MC_ENDIF();
5502 IEM_MC_ADVANCE_RIP();
5503 IEM_MC_END();
5504 }
5505 return VINF_SUCCESS;
5506}
5507
5508
5509/** Opcode 0x0f 0x95. */
5510FNIEMOP_DEF(iemOp_setne_Eb)
5511{
5512 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5513 IEMOP_HLP_MIN_386();
5514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5515
5516 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5517 * any way. AMD says it's "unused", whatever that means. We're
5518 * ignoring it for now. */
5519 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5520 {
5521 /* register target */
5522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5523 IEM_MC_BEGIN(0, 0);
5524 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5525 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5526 } IEM_MC_ELSE() {
5527 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5528 } IEM_MC_ENDIF();
5529 IEM_MC_ADVANCE_RIP();
5530 IEM_MC_END();
5531 }
5532 else
5533 {
5534 /* memory target */
5535 IEM_MC_BEGIN(0, 1);
5536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5539 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5540 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5541 } IEM_MC_ELSE() {
5542 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5543 } IEM_MC_ENDIF();
5544 IEM_MC_ADVANCE_RIP();
5545 IEM_MC_END();
5546 }
5547 return VINF_SUCCESS;
5548}
5549
5550
5551/** Opcode 0x0f 0x96. */
5552FNIEMOP_DEF(iemOp_setbe_Eb)
5553{
5554 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5555 IEMOP_HLP_MIN_386();
5556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5557
5558 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5559 * any way. AMD says it's "unused", whatever that means. We're
5560 * ignoring for now. */
5561 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5562 {
5563 /* register target */
5564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5565 IEM_MC_BEGIN(0, 0);
5566 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5567 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5568 } IEM_MC_ELSE() {
5569 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5570 } IEM_MC_ENDIF();
5571 IEM_MC_ADVANCE_RIP();
5572 IEM_MC_END();
5573 }
5574 else
5575 {
5576 /* memory target */
5577 IEM_MC_BEGIN(0, 1);
5578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5581 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5582 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5583 } IEM_MC_ELSE() {
5584 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5585 } IEM_MC_ENDIF();
5586 IEM_MC_ADVANCE_RIP();
5587 IEM_MC_END();
5588 }
5589 return VINF_SUCCESS;
5590}
5591
5592
5593/** Opcode 0x0f 0x97. */
5594FNIEMOP_DEF(iemOp_setnbe_Eb)
5595{
5596 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5597 IEMOP_HLP_MIN_386();
5598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5599
5600 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5601 * any way. AMD says it's "unused", whatever that means. We're
5602 * ignoring for now. */
5603 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5604 {
5605 /* register target */
5606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5607 IEM_MC_BEGIN(0, 0);
5608 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5609 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5610 } IEM_MC_ELSE() {
5611 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5612 } IEM_MC_ENDIF();
5613 IEM_MC_ADVANCE_RIP();
5614 IEM_MC_END();
5615 }
5616 else
5617 {
5618 /* memory target */
5619 IEM_MC_BEGIN(0, 1);
5620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5623 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5624 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5625 } IEM_MC_ELSE() {
5626 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5627 } IEM_MC_ENDIF();
5628 IEM_MC_ADVANCE_RIP();
5629 IEM_MC_END();
5630 }
5631 return VINF_SUCCESS;
5632}
5633
5634
5635/** Opcode 0x0f 0x98. */
5636FNIEMOP_DEF(iemOp_sets_Eb)
5637{
5638 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5639 IEMOP_HLP_MIN_386();
5640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5641
5642 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5643 * any way. AMD says it's "unused", whatever that means. We're
5644 * ignoring for now. */
5645 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5646 {
5647 /* register target */
5648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5649 IEM_MC_BEGIN(0, 0);
5650 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5651 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5652 } IEM_MC_ELSE() {
5653 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5654 } IEM_MC_ENDIF();
5655 IEM_MC_ADVANCE_RIP();
5656 IEM_MC_END();
5657 }
5658 else
5659 {
5660 /* memory target */
5661 IEM_MC_BEGIN(0, 1);
5662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5665 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5666 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5667 } IEM_MC_ELSE() {
5668 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5669 } IEM_MC_ENDIF();
5670 IEM_MC_ADVANCE_RIP();
5671 IEM_MC_END();
5672 }
5673 return VINF_SUCCESS;
5674}
5675
5676
5677/** Opcode 0x0f 0x99. */
5678FNIEMOP_DEF(iemOp_setns_Eb)
5679{
5680 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5681 IEMOP_HLP_MIN_386();
5682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5683
5684 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5685 * any way. AMD says it's "unused", whatever that means. We're
5686 * ignoring for now. */
5687 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5688 {
5689 /* register target */
5690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5691 IEM_MC_BEGIN(0, 0);
5692 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5693 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5694 } IEM_MC_ELSE() {
5695 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5696 } IEM_MC_ENDIF();
5697 IEM_MC_ADVANCE_RIP();
5698 IEM_MC_END();
5699 }
5700 else
5701 {
5702 /* memory target */
5703 IEM_MC_BEGIN(0, 1);
5704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5707 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5708 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5709 } IEM_MC_ELSE() {
5710 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5711 } IEM_MC_ENDIF();
5712 IEM_MC_ADVANCE_RIP();
5713 IEM_MC_END();
5714 }
5715 return VINF_SUCCESS;
5716}
5717
5718
5719/** Opcode 0x0f 0x9a. */
5720FNIEMOP_DEF(iemOp_setp_Eb)
5721{
5722 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5723 IEMOP_HLP_MIN_386();
5724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5725
5726 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5727 * any way. AMD says it's "unused", whatever that means. We're
5728 * ignoring for now. */
5729 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5730 {
5731 /* register target */
5732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5733 IEM_MC_BEGIN(0, 0);
5734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5735 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5736 } IEM_MC_ELSE() {
5737 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5738 } IEM_MC_ENDIF();
5739 IEM_MC_ADVANCE_RIP();
5740 IEM_MC_END();
5741 }
5742 else
5743 {
5744 /* memory target */
5745 IEM_MC_BEGIN(0, 1);
5746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5749 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5750 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5751 } IEM_MC_ELSE() {
5752 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5753 } IEM_MC_ENDIF();
5754 IEM_MC_ADVANCE_RIP();
5755 IEM_MC_END();
5756 }
5757 return VINF_SUCCESS;
5758}
5759
5760
5761/** Opcode 0x0f 0x9b. */
5762FNIEMOP_DEF(iemOp_setnp_Eb)
5763{
5764 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5765 IEMOP_HLP_MIN_386();
5766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5767
5768 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5769 * any way. AMD says it's "unused", whatever that means. We're
5770 * ignoring for now. */
5771 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5772 {
5773 /* register target */
5774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5775 IEM_MC_BEGIN(0, 0);
5776 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5777 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5778 } IEM_MC_ELSE() {
5779 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5780 } IEM_MC_ENDIF();
5781 IEM_MC_ADVANCE_RIP();
5782 IEM_MC_END();
5783 }
5784 else
5785 {
5786 /* memory target */
5787 IEM_MC_BEGIN(0, 1);
5788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5791 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5792 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5793 } IEM_MC_ELSE() {
5794 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5795 } IEM_MC_ENDIF();
5796 IEM_MC_ADVANCE_RIP();
5797 IEM_MC_END();
5798 }
5799 return VINF_SUCCESS;
5800}
5801
5802
5803/** Opcode 0x0f 0x9c. */
5804FNIEMOP_DEF(iemOp_setl_Eb)
5805{
5806 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5807 IEMOP_HLP_MIN_386();
5808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5809
5810 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5811 * any way. AMD says it's "unused", whatever that means. We're
5812 * ignoring for now. */
5813 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5814 {
5815 /* register target */
5816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5817 IEM_MC_BEGIN(0, 0);
5818 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5819 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5820 } IEM_MC_ELSE() {
5821 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5822 } IEM_MC_ENDIF();
5823 IEM_MC_ADVANCE_RIP();
5824 IEM_MC_END();
5825 }
5826 else
5827 {
5828 /* memory target */
5829 IEM_MC_BEGIN(0, 1);
5830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5833 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5834 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5835 } IEM_MC_ELSE() {
5836 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5837 } IEM_MC_ENDIF();
5838 IEM_MC_ADVANCE_RIP();
5839 IEM_MC_END();
5840 }
5841 return VINF_SUCCESS;
5842}
5843
5844
5845/** Opcode 0x0f 0x9d. */
5846FNIEMOP_DEF(iemOp_setnl_Eb)
5847{
5848 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5849 IEMOP_HLP_MIN_386();
5850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5851
5852 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5853 * any way. AMD says it's "unused", whatever that means. We're
5854 * ignoring for now. */
5855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5856 {
5857 /* register target */
5858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5859 IEM_MC_BEGIN(0, 0);
5860 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5861 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5862 } IEM_MC_ELSE() {
5863 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5864 } IEM_MC_ENDIF();
5865 IEM_MC_ADVANCE_RIP();
5866 IEM_MC_END();
5867 }
5868 else
5869 {
5870 /* memory target */
5871 IEM_MC_BEGIN(0, 1);
5872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5875 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5876 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5877 } IEM_MC_ELSE() {
5878 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5879 } IEM_MC_ENDIF();
5880 IEM_MC_ADVANCE_RIP();
5881 IEM_MC_END();
5882 }
5883 return VINF_SUCCESS;
5884}
5885
5886
5887/** Opcode 0x0f 0x9e. */
5888FNIEMOP_DEF(iemOp_setle_Eb)
5889{
5890 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5891 IEMOP_HLP_MIN_386();
5892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5893
5894 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5895 * any way. AMD says it's "unused", whatever that means. We're
5896 * ignoring for now. */
5897 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5898 {
5899 /* register target */
5900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5901 IEM_MC_BEGIN(0, 0);
5902 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5903 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5904 } IEM_MC_ELSE() {
5905 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5906 } IEM_MC_ENDIF();
5907 IEM_MC_ADVANCE_RIP();
5908 IEM_MC_END();
5909 }
5910 else
5911 {
5912 /* memory target */
5913 IEM_MC_BEGIN(0, 1);
5914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5917 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5918 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5919 } IEM_MC_ELSE() {
5920 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5921 } IEM_MC_ENDIF();
5922 IEM_MC_ADVANCE_RIP();
5923 IEM_MC_END();
5924 }
5925 return VINF_SUCCESS;
5926}
5927
5928
5929/** Opcode 0x0f 0x9f. */
5930FNIEMOP_DEF(iemOp_setnle_Eb)
5931{
5932 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5933 IEMOP_HLP_MIN_386();
5934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5935
5936 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5937 * any way. AMD says it's "unused", whatever that means. We're
5938 * ignoring for now. */
5939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5940 {
5941 /* register target */
5942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5943 IEM_MC_BEGIN(0, 0);
5944 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5945 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5946 } IEM_MC_ELSE() {
5947 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5948 } IEM_MC_ENDIF();
5949 IEM_MC_ADVANCE_RIP();
5950 IEM_MC_END();
5951 }
5952 else
5953 {
5954 /* memory target */
5955 IEM_MC_BEGIN(0, 1);
5956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5959 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5960 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5961 } IEM_MC_ELSE() {
5962 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5963 } IEM_MC_ENDIF();
5964 IEM_MC_ADVANCE_RIP();
5965 IEM_MC_END();
5966 }
5967 return VINF_SUCCESS;
5968}
5969
5970
5971/**
5972 * Common 'push segment-register' helper.
5973 */
5974FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5975{
5976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5977 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS can be pushed in 64-bit mode. */
5978 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5979
5980 switch (pVCpu->iem.s.enmEffOpSize)
5981 {
5982 case IEMMODE_16BIT:
5983 IEM_MC_BEGIN(0, 1);
5984 IEM_MC_LOCAL(uint16_t, u16Value);
5985 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5986 IEM_MC_PUSH_U16(u16Value);
5987 IEM_MC_ADVANCE_RIP();
5988 IEM_MC_END();
5989 break;
5990
5991 case IEMMODE_32BIT:
5992 IEM_MC_BEGIN(0, 1);
5993 IEM_MC_LOCAL(uint32_t, u32Value);
5994 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
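            /* IEM_MC_PUSH_U32_SREG is used rather than IEM_MC_PUSH_U32 because
               recent CPUs write only the low 16 bits of a 32-bit segment
               register push, leaving the high word of the stack slot untouched. */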
5995 IEM_MC_PUSH_U32_SREG(u32Value);
5996 IEM_MC_ADVANCE_RIP();
5997 IEM_MC_END();
5998 break;
5999
6000 case IEMMODE_64BIT:
6001 IEM_MC_BEGIN(0, 1);
6002 IEM_MC_LOCAL(uint64_t, u64Value);
6003 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6004 IEM_MC_PUSH_U64(u64Value);
6005 IEM_MC_ADVANCE_RIP();
6006 IEM_MC_END();
6007 break;
6008 }
6009
6010 return VINF_SUCCESS;
6011}
6012
6013
6014/** Opcode 0x0f 0xa0. */
6015FNIEMOP_DEF(iemOp_push_fs)
6016{
6017 IEMOP_MNEMONIC(push_fs, "push fs");
6018 IEMOP_HLP_MIN_386();
6019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6020 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6021}
6022
6023
6024/** Opcode 0x0f 0xa1. */
6025FNIEMOP_DEF(iemOp_pop_fs)
6026{
6027 IEMOP_MNEMONIC(pop_fs, "pop fs");
6028 IEMOP_HLP_MIN_386();
6029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6030 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6031}
6032
6033
6034/** Opcode 0x0f 0xa2. */
6035FNIEMOP_DEF(iemOp_cpuid)
6036{
6037 IEMOP_MNEMONIC(cpuid, "cpuid");
6038 IEMOP_HLP_MIN_486(); /* CPUID is not present on all 486s. */
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6040 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6041}
6042
6043
6044/**
6045 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6046 * iemOp_bts_Ev_Gv.
6047 */
6048FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6049{
6050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6051 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
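    /* Only CF is architecturally defined for bt/bts/btr/btc; OF, SF, ZF, AF
       and PF are undefined, hence the verification mask above. */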
6052
6053 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6054 {
6055 /* register destination. */
6056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6057 switch (pVCpu->iem.s.enmEffOpSize)
6058 {
6059 case IEMMODE_16BIT:
6060 IEM_MC_BEGIN(3, 0);
6061 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6062 IEM_MC_ARG(uint16_t, u16Src, 1);
6063 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6064
6065 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6066 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6067 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6068 IEM_MC_REF_EFLAGS(pEFlags);
6069 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6070
6071 IEM_MC_ADVANCE_RIP();
6072 IEM_MC_END();
6073 return VINF_SUCCESS;
6074
6075 case IEMMODE_32BIT:
6076 IEM_MC_BEGIN(3, 0);
6077 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6078 IEM_MC_ARG(uint32_t, u32Src, 1);
6079 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6080
6081 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6082 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6083 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6084 IEM_MC_REF_EFLAGS(pEFlags);
6085 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6086
6087 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6088 IEM_MC_ADVANCE_RIP();
6089 IEM_MC_END();
6090 return VINF_SUCCESS;
6091
6092 case IEMMODE_64BIT:
6093 IEM_MC_BEGIN(3, 0);
6094 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6095 IEM_MC_ARG(uint64_t, u64Src, 1);
6096 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6097
6098 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6099 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6100 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6101 IEM_MC_REF_EFLAGS(pEFlags);
6102 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6103
6104 IEM_MC_ADVANCE_RIP();
6105 IEM_MC_END();
6106 return VINF_SUCCESS;
6107
6108 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6109 }
6110 }
6111 else
6112 {
6113 /* memory destination. */
6114
6115 uint32_t fAccess;
6116 if (pImpl->pfnLockedU16)
6117 fAccess = IEM_ACCESS_DATA_RW;
6118 else /* BT */
6119 fAccess = IEM_ACCESS_DATA_R;
6120
6121 /** @todo test negative bit offsets! */
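        /* For memory destinations the bit offset in Gv is signed: the effective
           address is adjusted by (offset >> log2(operand bits)) operand-sized
           units (the SAR+SHL pairs below) and the in-unit bit index is the
           offset masked to the operand width.  E.g. 'bt word [eax], 17' tests
           bit 1 of the word at eax+2, while offset -1 tests bit 15 of the
           word at eax-2. */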
6122 switch (pVCpu->iem.s.enmEffOpSize)
6123 {
6124 case IEMMODE_16BIT:
6125 IEM_MC_BEGIN(3, 2);
6126 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6127 IEM_MC_ARG(uint16_t, u16Src, 1);
6128 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6130 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6131
6132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6133 if (pImpl->pfnLockedU16)
6134 IEMOP_HLP_DONE_DECODING();
6135 else
6136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6137 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6138 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6139 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6140 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6141 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6142 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6143 IEM_MC_FETCH_EFLAGS(EFlags);
6144
6145 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6146 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6147 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6148 else
6149 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6150 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6151
6152 IEM_MC_COMMIT_EFLAGS(EFlags);
6153 IEM_MC_ADVANCE_RIP();
6154 IEM_MC_END();
6155 return VINF_SUCCESS;
6156
6157 case IEMMODE_32BIT:
6158 IEM_MC_BEGIN(3, 2);
6159 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6160 IEM_MC_ARG(uint32_t, u32Src, 1);
6161 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6163 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6164
6165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6166 if (pImpl->pfnLockedU16)
6167 IEMOP_HLP_DONE_DECODING();
6168 else
6169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6170 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6171 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6172 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6173 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6174 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6175 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6176 IEM_MC_FETCH_EFLAGS(EFlags);
6177
6178 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6179 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6180 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6181 else
6182 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6183 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6184
6185 IEM_MC_COMMIT_EFLAGS(EFlags);
6186 IEM_MC_ADVANCE_RIP();
6187 IEM_MC_END();
6188 return VINF_SUCCESS;
6189
6190 case IEMMODE_64BIT:
6191 IEM_MC_BEGIN(3, 2);
6192 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6193 IEM_MC_ARG(uint64_t, u64Src, 1);
6194 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6196 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6197
6198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6199 if (pImpl->pfnLockedU16)
6200 IEMOP_HLP_DONE_DECODING();
6201 else
6202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6203 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6204 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6205 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6206 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6207 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6208 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6209 IEM_MC_FETCH_EFLAGS(EFlags);
6210
6211 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6212 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6213 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6214 else
6215 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6216 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6217
6218 IEM_MC_COMMIT_EFLAGS(EFlags);
6219 IEM_MC_ADVANCE_RIP();
6220 IEM_MC_END();
6221 return VINF_SUCCESS;
6222
6223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6224 }
6225 }
6226}
6227
6228
6229/** Opcode 0x0f 0xa3. */
6230FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6231{
6232 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6233 IEMOP_HLP_MIN_386();
6234 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6235}
6236
6237
6238/**
6239 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6240 */
6241FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6242{
6243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6244 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
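    /* AF is always undefined for shld/shrd and OF is only defined for
       1-bit shifts, hence the verification mask above. */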
6245
6246 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6247 {
6248 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6250
6251 switch (pVCpu->iem.s.enmEffOpSize)
6252 {
6253 case IEMMODE_16BIT:
6254 IEM_MC_BEGIN(4, 0);
6255 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6256 IEM_MC_ARG(uint16_t, u16Src, 1);
6257 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6258 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6259
6260 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6261 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6262 IEM_MC_REF_EFLAGS(pEFlags);
6263 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6264
6265 IEM_MC_ADVANCE_RIP();
6266 IEM_MC_END();
6267 return VINF_SUCCESS;
6268
6269 case IEMMODE_32BIT:
6270 IEM_MC_BEGIN(4, 0);
6271 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6272 IEM_MC_ARG(uint32_t, u32Src, 1);
6273 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6274 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6275
6276 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6277 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6278 IEM_MC_REF_EFLAGS(pEFlags);
6279 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6280
6281 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6282 IEM_MC_ADVANCE_RIP();
6283 IEM_MC_END();
6284 return VINF_SUCCESS;
6285
6286 case IEMMODE_64BIT:
6287 IEM_MC_BEGIN(4, 0);
6288 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6289 IEM_MC_ARG(uint64_t, u64Src, 1);
6290 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6291 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6292
6293 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6294 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6295 IEM_MC_REF_EFLAGS(pEFlags);
6296 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6297
6298 IEM_MC_ADVANCE_RIP();
6299 IEM_MC_END();
6300 return VINF_SUCCESS;
6301
6302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6303 }
6304 }
6305 else
6306 {
6307 switch (pVCpu->iem.s.enmEffOpSize)
6308 {
6309 case IEMMODE_16BIT:
6310 IEM_MC_BEGIN(4, 2);
6311 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6312 IEM_MC_ARG(uint16_t, u16Src, 1);
6313 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6314 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6316
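                /* Note: cbImm=1 below accounts for the shift-count immediate that
                   still follows the ModR/M bytes; RIP-relative displacements are
                   relative to the end of the whole instruction. */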
6317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6318 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6319 IEM_MC_ASSIGN(cShiftArg, cShift);
6320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6321 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6322 IEM_MC_FETCH_EFLAGS(EFlags);
6323 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6324 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6325
6326 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6327 IEM_MC_COMMIT_EFLAGS(EFlags);
6328 IEM_MC_ADVANCE_RIP();
6329 IEM_MC_END();
6330 return VINF_SUCCESS;
6331
6332 case IEMMODE_32BIT:
6333 IEM_MC_BEGIN(4, 2);
6334 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6335 IEM_MC_ARG(uint32_t, u32Src, 1);
6336 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6337 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6339
6340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6341 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6342 IEM_MC_ASSIGN(cShiftArg, cShift);
6343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6344 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6345 IEM_MC_FETCH_EFLAGS(EFlags);
6346 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6347 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6348
6349 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6350 IEM_MC_COMMIT_EFLAGS(EFlags);
6351 IEM_MC_ADVANCE_RIP();
6352 IEM_MC_END();
6353 return VINF_SUCCESS;
6354
6355 case IEMMODE_64BIT:
6356 IEM_MC_BEGIN(4, 2);
6357 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6358 IEM_MC_ARG(uint64_t, u64Src, 1);
6359 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6360 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6362
6363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6364 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6365 IEM_MC_ASSIGN(cShiftArg, cShift);
6366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6367 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6368 IEM_MC_FETCH_EFLAGS(EFlags);
6369 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6370 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6371
6372 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6373 IEM_MC_COMMIT_EFLAGS(EFlags);
6374 IEM_MC_ADVANCE_RIP();
6375 IEM_MC_END();
6376 return VINF_SUCCESS;
6377
6378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6379 }
6380 }
6381}
6382
6383
6384/**
6385 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6386 */
6387FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6388{
6389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6390 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6391
6392 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6393 {
6394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6395
6396 switch (pVCpu->iem.s.enmEffOpSize)
6397 {
6398 case IEMMODE_16BIT:
6399 IEM_MC_BEGIN(4, 0);
6400 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6401 IEM_MC_ARG(uint16_t, u16Src, 1);
6402 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6403 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6404
6405 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6406 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6407 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6408 IEM_MC_REF_EFLAGS(pEFlags);
6409 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6410
6411 IEM_MC_ADVANCE_RIP();
6412 IEM_MC_END();
6413 return VINF_SUCCESS;
6414
6415 case IEMMODE_32BIT:
6416 IEM_MC_BEGIN(4, 0);
6417 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6418 IEM_MC_ARG(uint32_t, u32Src, 1);
6419 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6420 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6421
6422 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6423 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6424 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6425 IEM_MC_REF_EFLAGS(pEFlags);
6426 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6427
6428 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6429 IEM_MC_ADVANCE_RIP();
6430 IEM_MC_END();
6431 return VINF_SUCCESS;
6432
6433 case IEMMODE_64BIT:
6434 IEM_MC_BEGIN(4, 0);
6435 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6436 IEM_MC_ARG(uint64_t, u64Src, 1);
6437 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6438 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6439
6440 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6441 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6442 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6443 IEM_MC_REF_EFLAGS(pEFlags);
6444 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6445
6446 IEM_MC_ADVANCE_RIP();
6447 IEM_MC_END();
6448 return VINF_SUCCESS;
6449
6450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6451 }
6452 }
6453 else
6454 {
6455 switch (pVCpu->iem.s.enmEffOpSize)
6456 {
6457 case IEMMODE_16BIT:
6458 IEM_MC_BEGIN(4, 2);
6459 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6460 IEM_MC_ARG(uint16_t, u16Src, 1);
6461 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6462 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6464
6465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6467 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6468 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6469 IEM_MC_FETCH_EFLAGS(EFlags);
6470 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6471 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6472
6473 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6474 IEM_MC_COMMIT_EFLAGS(EFlags);
6475 IEM_MC_ADVANCE_RIP();
6476 IEM_MC_END();
6477 return VINF_SUCCESS;
6478
6479 case IEMMODE_32BIT:
6480 IEM_MC_BEGIN(4, 2);
6481 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6482 IEM_MC_ARG(uint32_t, u32Src, 1);
6483 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6484 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6486
6487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6489 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6490 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6491 IEM_MC_FETCH_EFLAGS(EFlags);
6492 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6493 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6494
6495 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6496 IEM_MC_COMMIT_EFLAGS(EFlags);
6497 IEM_MC_ADVANCE_RIP();
6498 IEM_MC_END();
6499 return VINF_SUCCESS;
6500
6501 case IEMMODE_64BIT:
6502 IEM_MC_BEGIN(4, 2);
6503 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6504 IEM_MC_ARG(uint64_t, u64Src, 1);
6505 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6506 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6508
6509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6511 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6512 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6513 IEM_MC_FETCH_EFLAGS(EFlags);
6514 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6515 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6516
6517 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6518 IEM_MC_COMMIT_EFLAGS(EFlags);
6519 IEM_MC_ADVANCE_RIP();
6520 IEM_MC_END();
6521 return VINF_SUCCESS;
6522
6523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6524 }
6525 }
6526}
6527
6528
6529
6530/** Opcode 0x0f 0xa4. */
6531FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6532{
6533 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6534 IEMOP_HLP_MIN_386();
6535 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6536}
6537
6538
6539/** Opcode 0x0f 0xa5. */
6540FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6541{
6542 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6543 IEMOP_HLP_MIN_386();
6544 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6545}
6546
6547
6548/** Opcode 0x0f 0xa8. */
6549FNIEMOP_DEF(iemOp_push_gs)
6550{
6551 IEMOP_MNEMONIC(push_gs, "push gs");
6552 IEMOP_HLP_MIN_386();
6553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6554 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6555}
6556
6557
6558/** Opcode 0x0f 0xa9. */
6559FNIEMOP_DEF(iemOp_pop_gs)
6560{
6561 IEMOP_MNEMONIC(pop_gs, "pop gs");
6562 IEMOP_HLP_MIN_386();
6563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6564 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6565}
6566
6567
6568/** Opcode 0x0f 0xaa. */
6569FNIEMOP_DEF(iemOp_rsm)
6570{
6571 IEMOP_MNEMONIC(rsm, "rsm");
6572 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6573 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6574 * intercept). */
6575 IEMOP_BITCH_ABOUT_STUB();
6576 return IEMOP_RAISE_INVALID_OPCODE();
6577}
6578
6581
6582/** Opcode 0x0f 0xab. */
6583FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6584{
6585 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6586 IEMOP_HLP_MIN_386();
6587 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6588}
6589
6590
6591/** Opcode 0x0f 0xac. */
6592FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6593{
6594 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6595 IEMOP_HLP_MIN_386();
6596 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6597}
6598
6599
6600/** Opcode 0x0f 0xad. */
6601FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6602{
6603 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6604 IEMOP_HLP_MIN_386();
6605 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6606}
6607
6608
6609/** Opcode 0x0f 0xae mem/0. */
6610FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6611{
6612 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6613 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6614 return IEMOP_RAISE_INVALID_OPCODE();
6615
6616 IEM_MC_BEGIN(3, 1);
6617 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6618 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6619 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
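    /* Note: the effective operand size distinguishes fxsave from fxsave64
       (REX.W), which stores the FPU IP/DP as 64-bit values instead of
       32-bit offset + selector; the same applies to the fxrstor, xsave and
       xrstor handlers below. */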
6620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6622 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6623 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6624 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6625 IEM_MC_END();
6626 return VINF_SUCCESS;
6627}
6628
6629
6630/** Opcode 0x0f 0xae mem/1. */
6631FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6632{
6633 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6634 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6635 return IEMOP_RAISE_INVALID_OPCODE();
6636
6637 IEM_MC_BEGIN(3, 1);
6638 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6639 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6640 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6643 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6644 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6645 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6646 IEM_MC_END();
6647 return VINF_SUCCESS;
6648}
6649
6650
6651/**
6652 * @opmaps grp15
6653 * @opcode !11/2
6654 * @oppfx none
6655 * @opcpuid sse
6656 * @opgroup og_sse_mxcsrsm
6657 * @opxcpttype 5
6658 * @optest op1=0 -> mxcsr=0
6659 * @optest op1=0x2083 -> mxcsr=0x2083
6660 * @optest op1=0xfffffffe -> value.xcpt=0xd
6661 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6662 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6663 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6664 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6665 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6666 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6667 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6668 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6669 */
6670FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6671{
6672 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6673 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6674 return IEMOP_RAISE_INVALID_OPCODE();
6675
6676 IEM_MC_BEGIN(2, 0);
6677 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6678 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6681 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6682 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6683 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6684 IEM_MC_END();
6685 return VINF_SUCCESS;
6686}
6687
6688
6689/**
6690 * @opmaps grp15
6691 * @opcode !11/3
6692 * @oppfx none
6693 * @opcpuid sse
6694 * @opgroup og_sse_mxcsrsm
6695 * @opxcpttype 5
6696 * @optest mxcsr=0 -> op1=0
6697 * @optest mxcsr=0x2083 -> op1=0x2083
6698 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6699 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6700 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6701 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6702 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6703 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6704 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6705 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6706 */
6707FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6708{
6709 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6710 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6711 return IEMOP_RAISE_INVALID_OPCODE();
6712
6713 IEM_MC_BEGIN(2, 0);
6714 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6715 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6719 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6720 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6721 IEM_MC_END();
6722 return VINF_SUCCESS;
6723}
6724
6725
6726/**
6727 * @opmaps grp15
6728 * @opcode !11/4
6729 * @oppfx none
6730 * @opcpuid xsave
6731 * @opgroup og_system
6732 * @opxcpttype none
6733 */
6734FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6735{
6736 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6737 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6738 return IEMOP_RAISE_INVALID_OPCODE();
6739
6740 IEM_MC_BEGIN(3, 0);
6741 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6742 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6743 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6746 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6747 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6748 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6749 IEM_MC_END();
6750 return VINF_SUCCESS;
6751}
6752
6753
6754/**
6755 * @opmaps grp15
6756 * @opcode !11/5
6757 * @oppfx none
6758 * @opcpuid xsave
6759 * @opgroup og_system
6760 * @opxcpttype none
6761 */
6762FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6763{
6764 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6765 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6766 return IEMOP_RAISE_INVALID_OPCODE();
6767
6768 IEM_MC_BEGIN(3, 0);
6769 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6770 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6771 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6774 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, like fxrstor above. */
6775 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6776 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6777 IEM_MC_END();
6778 return VINF_SUCCESS;
6779}
6780
6781/** Opcode 0x0f 0xae mem/6. */
6782FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6783
6784/**
6785 * @opmaps grp15
6786 * @opcode !11/7
6787 * @oppfx none
6788 * @opcpuid clfsh
6789 * @opgroup og_cachectl
6790 * @optest op1=1 ->
6791 */
6792FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6793{
6794 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6795 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6796 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6797
6798 IEM_MC_BEGIN(2, 0);
6799 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6800 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6803 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6804 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6805 IEM_MC_END();
6806 return VINF_SUCCESS;
6807}
6808
6809/**
6810 * @opmaps grp15
6811 * @opcode !11/7
6812 * @oppfx 0x66
6813 * @opcpuid clflushopt
6814 * @opgroup og_cachectl
6815 * @optest op1=1 ->
6816 */
6817FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6818{
6819 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6820 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6821 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6822
6823 IEM_MC_BEGIN(2, 0);
6824 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6825 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6828 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6829 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6830 IEM_MC_END();
6831 return VINF_SUCCESS;
6832}
6833
6834
6835/** Opcode 0x0f 0xae 11b/5. */
6836FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6837{
6838 RT_NOREF_PV(bRm);
6839 IEMOP_MNEMONIC(lfence, "lfence");
6840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6841 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6842 return IEMOP_RAISE_INVALID_OPCODE();
6843
6844 IEM_MC_BEGIN(0, 0);
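    /* If the host CPU lacks SSE2, fall back to iemAImpl_alt_mem_fence, which
       achieves the ordering with a locked memory operation instead; the same
       applies to the mfence/sfence handlers below. */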
6845 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6846 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6847 else
6848 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6849 IEM_MC_ADVANCE_RIP();
6850 IEM_MC_END();
6851 return VINF_SUCCESS;
6852}
6853
6854
6855/** Opcode 0x0f 0xae 11b/6. */
6856FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6857{
6858 RT_NOREF_PV(bRm);
6859 IEMOP_MNEMONIC(mfence, "mfence");
6860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6861 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6862 return IEMOP_RAISE_INVALID_OPCODE();
6863
6864 IEM_MC_BEGIN(0, 0);
6865 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6866 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6867 else
6868 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6869 IEM_MC_ADVANCE_RIP();
6870 IEM_MC_END();
6871 return VINF_SUCCESS;
6872}
6873
6874
6875/** Opcode 0x0f 0xae 11b/7. */
6876FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6877{
6878 RT_NOREF_PV(bRm);
6879 IEMOP_MNEMONIC(sfence, "sfence");
6880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6881 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse) /* sfence was introduced with SSE, not SSE2. */
6882 return IEMOP_RAISE_INVALID_OPCODE();
6883
6884 IEM_MC_BEGIN(0, 0);
6885 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6886 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6887 else
6888 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6889 IEM_MC_ADVANCE_RIP();
6890 IEM_MC_END();
6891 return VINF_SUCCESS;
6892}
6893
6894
6895/** Opcode 0xf3 0x0f 0xae 11b/0. */
6896FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
6897{
6898 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
6899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
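    /* IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT below raises #UD unless we are in
       64-bit mode with CR4.FSGSBASE set; the same applies to the other
       three rd/wr?sbase handlers. */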
6900 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6901 {
6902 IEM_MC_BEGIN(1, 0);
6903 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6904 IEM_MC_ARG(uint64_t, u64Dst, 0);
6905 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
6906 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6907 IEM_MC_ADVANCE_RIP();
6908 IEM_MC_END();
6909 }
6910 else
6911 {
6912 IEM_MC_BEGIN(1, 0);
6913 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6914 IEM_MC_ARG(uint32_t, u32Dst, 0);
6915 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
6916 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6917 IEM_MC_ADVANCE_RIP();
6918 IEM_MC_END();
6919 }
6920 return VINF_SUCCESS;
6921}
6922
6923/** Opcode 0xf3 0x0f 0xae 11b/1. */
6924FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
6925{
6926 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
6927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6928 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6929 {
6930 IEM_MC_BEGIN(1, 0);
6931 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6932 IEM_MC_ARG(uint64_t, u64Dst, 0);
6933 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
6934 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6935 IEM_MC_ADVANCE_RIP();
6936 IEM_MC_END();
6937 }
6938 else
6939 {
6940 IEM_MC_BEGIN(1, 0);
6941 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6942 IEM_MC_ARG(uint32_t, u32Dst, 0);
6943 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
6944 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6945 IEM_MC_ADVANCE_RIP();
6946 IEM_MC_END();
6947 }
6948 return VINF_SUCCESS;
6949}
6950
6951/** Opcode 0xf3 0x0f 0xae 11b/2. */
6952FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
6953{
6954 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
6955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6956 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6957 {
6958 IEM_MC_BEGIN(1, 0);
6959 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6960 IEM_MC_ARG(uint64_t, u64Dst, 0);
6961 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6962 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6963 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
6964 IEM_MC_ADVANCE_RIP();
6965 IEM_MC_END();
6966 }
6967 else
6968 {
6969 IEM_MC_BEGIN(1, 0);
6970 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6971 IEM_MC_ARG(uint32_t, u32Dst, 0);
6972 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6973 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
6974 IEM_MC_ADVANCE_RIP();
6975 IEM_MC_END();
6976 }
6977 return VINF_SUCCESS;
6978}
6979
6980/** Opcode 0xf3 0x0f 0xae 11b/3. */
6981FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
6982{
6983 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
6984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6985 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6986 {
6987 IEM_MC_BEGIN(1, 0);
6988 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6989 IEM_MC_ARG(uint64_t, u64Dst, 0);
6990 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6991 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6992 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
6993 IEM_MC_ADVANCE_RIP();
6994 IEM_MC_END();
6995 }
6996 else
6997 {
6998 IEM_MC_BEGIN(1, 0);
6999 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7000 IEM_MC_ARG(uint32_t, u32Dst, 0);
7001 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7002 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7003 IEM_MC_ADVANCE_RIP();
7004 IEM_MC_END();
7005 }
7006 return VINF_SUCCESS;
7007}
7008
7009
7010/**
7011 * Group 15 jump table for register variant.
7012 */
7013IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7014{ /* pfx: none, 066h, 0f3h, 0f2h */
7015 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7016 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7017 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7018 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7019 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7020 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7021 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7022 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7023};
7024AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7025
7026
7027/**
7028 * Group 15 jump table for memory variant.
7029 */
7030IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7031{ /* pfx: none, 066h, 0f3h, 0f2h */
7032 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7033 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7034 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7035 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7036 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7037 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7038 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7039 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7040};
7041AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7042
7043
7044/** Opcode 0x0f 0xae. */
7045FNIEMOP_DEF(iemOp_Grp15)
7046{
7047 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7048 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
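    /* Dispatch on the mod field (register vs. memory form); each 8x4 table
       is indexed by the ModR/M reg field (row) and the active prefix
       (column: none/66h/F3h/F2h, i.e. idxPrefix). */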
7049 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7050 /* register, register */
7051 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7052 + pVCpu->iem.s.idxPrefix], bRm);
7053 /* memory, register */
7054 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7055 + pVCpu->iem.s.idxPrefix], bRm);
7056}
7057
7058
7059/** Opcode 0x0f 0xaf. */
7060FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7061{
7062 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7063 IEMOP_HLP_MIN_386();
7064 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7065 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7066}
7067
7068
7069/** Opcode 0x0f 0xb0. */
7070FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7071{
7072 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7073 IEMOP_HLP_MIN_486();
7074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7075
7076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7077 {
7078 IEMOP_HLP_DONE_DECODING();
7079 IEM_MC_BEGIN(4, 0);
7080 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7081 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7082 IEM_MC_ARG(uint8_t, u8Src, 2);
7083 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7084
7085 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7086 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7087 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7088 IEM_MC_REF_EFLAGS(pEFlags);
7089 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7090 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7091 else
7092 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7093
7094 IEM_MC_ADVANCE_RIP();
7095 IEM_MC_END();
7096 }
7097 else
7098 {
7099 IEM_MC_BEGIN(4, 3);
7100 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7101 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7102 IEM_MC_ARG(uint8_t, u8Src, 2);
7103 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7105 IEM_MC_LOCAL(uint8_t, u8Al);
7106
7107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7108 IEMOP_HLP_DONE_DECODING();
7109 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7110 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7111 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7112 IEM_MC_FETCH_EFLAGS(EFlags);
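        /* AL is fetched into a local and passed by reference so that the
           (possibly updated) value is only written back to the guest
           register after the memory operand has been committed. */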
7113 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7114 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7115 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7116 else
7117 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7118
7119 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7120 IEM_MC_COMMIT_EFLAGS(EFlags);
7121 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7122 IEM_MC_ADVANCE_RIP();
7123 IEM_MC_END();
7124 }
7125 return VINF_SUCCESS;
7126}
7127
7128/** Opcode 0x0f 0xb1. */
7129FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7130{
7131 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7132 IEMOP_HLP_MIN_486();
7133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7134
7135 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7136 {
7137 IEMOP_HLP_DONE_DECODING();
7138 switch (pVCpu->iem.s.enmEffOpSize)
7139 {
7140 case IEMMODE_16BIT:
7141 IEM_MC_BEGIN(4, 0);
7142 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7143 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7144 IEM_MC_ARG(uint16_t, u16Src, 2);
7145 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7146
7147 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7148 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7149 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7150 IEM_MC_REF_EFLAGS(pEFlags);
7151 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7152 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7153 else
7154 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7155
7156 IEM_MC_ADVANCE_RIP();
7157 IEM_MC_END();
7158 return VINF_SUCCESS;
7159
7160 case IEMMODE_32BIT:
7161 IEM_MC_BEGIN(4, 0);
7162 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7163 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7164 IEM_MC_ARG(uint32_t, u32Src, 2);
7165 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7166
7167 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7168 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7169 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7170 IEM_MC_REF_EFLAGS(pEFlags);
7171 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7172 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7173 else
7174 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7175
7176 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7177 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7178 IEM_MC_ADVANCE_RIP();
7179 IEM_MC_END();
7180 return VINF_SUCCESS;
7181
7182 case IEMMODE_64BIT:
7183 IEM_MC_BEGIN(4, 0);
7184 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7185 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7186#ifdef RT_ARCH_X86
7187 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7188#else
7189 IEM_MC_ARG(uint64_t, u64Src, 2);
7190#endif
7191 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7192
7193 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7194 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7195 IEM_MC_REF_EFLAGS(pEFlags);
7196#ifdef RT_ARCH_X86
7197 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7198 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7199 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7200 else
7201 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7202#else
7203 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7204 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7205 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7206 else
7207 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7208#endif
7209
7210 IEM_MC_ADVANCE_RIP();
7211 IEM_MC_END();
7212 return VINF_SUCCESS;
7213
7214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7215 }
7216 }
7217 else
7218 {
7219 switch (pVCpu->iem.s.enmEffOpSize)
7220 {
7221 case IEMMODE_16BIT:
7222 IEM_MC_BEGIN(4, 3);
7223 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7224 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7225 IEM_MC_ARG(uint16_t, u16Src, 2);
7226 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7228 IEM_MC_LOCAL(uint16_t, u16Ax);
7229
7230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7231 IEMOP_HLP_DONE_DECODING();
7232 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7233 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7234 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7235 IEM_MC_FETCH_EFLAGS(EFlags);
7236 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7237 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7238 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7239 else
7240 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7241
7242 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7243 IEM_MC_COMMIT_EFLAGS(EFlags);
7244 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7245 IEM_MC_ADVANCE_RIP();
7246 IEM_MC_END();
7247 return VINF_SUCCESS;
7248
7249 case IEMMODE_32BIT:
7250 IEM_MC_BEGIN(4, 3);
7251 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7252 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7253 IEM_MC_ARG(uint32_t, u32Src, 2);
7254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7256 IEM_MC_LOCAL(uint32_t, u32Eax);
7257
7258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7259 IEMOP_HLP_DONE_DECODING();
7260 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7261 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7262 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7263 IEM_MC_FETCH_EFLAGS(EFlags);
7264 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7265 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7266 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7267 else
7268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7269
7270 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7271 IEM_MC_COMMIT_EFLAGS(EFlags);
7272 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7273 IEM_MC_ADVANCE_RIP();
7274 IEM_MC_END();
7275 return VINF_SUCCESS;
7276
7277 case IEMMODE_64BIT:
7278 IEM_MC_BEGIN(4, 3);
7279 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7280 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7281#ifdef RT_ARCH_X86
7282 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7283#else
7284 IEM_MC_ARG(uint64_t, u64Src, 2);
7285#endif
7286 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7288 IEM_MC_LOCAL(uint64_t, u64Rax);
7289
7290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7291 IEMOP_HLP_DONE_DECODING();
7292 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7293 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7294 IEM_MC_FETCH_EFLAGS(EFlags);
7295 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7296#ifdef RT_ARCH_X86
7297 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7298 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7299 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7300 else
7301 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7302#else
7303 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7304 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7305 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7306 else
7307 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7308#endif
7309
7310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7311 IEM_MC_COMMIT_EFLAGS(EFlags);
7312 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7313 IEM_MC_ADVANCE_RIP();
7314 IEM_MC_END();
7315 return VINF_SUCCESS;
7316
7317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7318 }
7319 }
7320}
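
/* A minimal plain-C sketch of the CMPXCHG semantics the blocks above
   implement, shown for the 16-bit width; the helper name is hypothetical
   and the block is disabled (#if 0) as it is for illustration only. */
#if 0
/** Compares the accumulator with the destination; on match stores the
 * source and sets ZF, otherwise loads the accumulator from the
 * destination and clears ZF. Returns the new ZF value. */
static int iemDemoCmpXchgU16(uint16_t *pu16Dst, uint16_t *pu16Ax, uint16_t u16Src)
{
    if (*pu16Dst == *pu16Ax)
    {
        *pu16Dst = u16Src;  /* equal: write the source operand */
        return 1;           /* ZF=1 */
    }
    *pu16Ax = *pu16Dst;     /* not equal: update the accumulator */
    return 0;               /* ZF=0 */
}
#endif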
7321
7322
7323FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7324{
7325 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7326 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7327
7328 switch (pVCpu->iem.s.enmEffOpSize)
7329 {
7330 case IEMMODE_16BIT:
7331 IEM_MC_BEGIN(5, 1);
7332 IEM_MC_ARG(uint16_t, uSel, 0);
7333 IEM_MC_ARG(uint16_t, offSeg, 1);
7334 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7335 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7336 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7337 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7340 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7341 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7342 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7343 IEM_MC_END();
7344 return VINF_SUCCESS;
7345
7346 case IEMMODE_32BIT:
7347 IEM_MC_BEGIN(5, 1);
7348 IEM_MC_ARG(uint16_t, uSel, 0);
7349 IEM_MC_ARG(uint32_t, offSeg, 1);
7350 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7351 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7352 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7353 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7356 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7357 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7358 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7359 IEM_MC_END();
7360 return VINF_SUCCESS;
7361
7362 case IEMMODE_64BIT:
7363 IEM_MC_BEGIN(5, 1);
7364 IEM_MC_ARG(uint16_t, uSel, 0);
7365 IEM_MC_ARG(uint64_t, offSeg, 1);
7366 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7367 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7368 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7369 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7372 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7373 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7374 else
7375 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7376 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7377 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7378 IEM_MC_END();
7379 return VINF_SUCCESS;
7380
7381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7382 }
7383}
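
/* Far-pointer layout fetched by iemOpCommonLoadSRegAndGreg above: the
   offset is stored first, with the 16-bit selector following at
   displacement 2, 4 or 8 depending on the effective operand size
   (m16:16, m16:32 and m16:64 respectively). */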
7384
7385
7386/** Opcode 0x0f 0xb2. */
7387FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7388{
7389 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7390 IEMOP_HLP_MIN_386();
7391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7392 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7393 return IEMOP_RAISE_INVALID_OPCODE();
7394 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7395}
7396
7397
7398/** Opcode 0x0f 0xb3. */
7399FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7400{
7401 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7402 IEMOP_HLP_MIN_386();
7403 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7404}
7405
7406
7407/** Opcode 0x0f 0xb4. */
7408FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7409{
7410 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7411 IEMOP_HLP_MIN_386();
7412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7413 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7414 return IEMOP_RAISE_INVALID_OPCODE();
7415 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7416}
7417
7418
7419/** Opcode 0x0f 0xb5. */
7420FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7421{
7422 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7423 IEMOP_HLP_MIN_386();
7424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7425 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7426 return IEMOP_RAISE_INVALID_OPCODE();
7427 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7428}
7429
7430
7431/** Opcode 0x0f 0xb6. */
7432FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7433{
7434 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7435 IEMOP_HLP_MIN_386();
7436
7437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7438
7439 /*
7440 * If rm is denoting a register, no more instruction bytes.
7441 */
7442 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7443 {
7444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7445 switch (pVCpu->iem.s.enmEffOpSize)
7446 {
7447 case IEMMODE_16BIT:
7448 IEM_MC_BEGIN(0, 1);
7449 IEM_MC_LOCAL(uint16_t, u16Value);
7450 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7451 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7452 IEM_MC_ADVANCE_RIP();
7453 IEM_MC_END();
7454 return VINF_SUCCESS;
7455
7456 case IEMMODE_32BIT:
7457 IEM_MC_BEGIN(0, 1);
7458 IEM_MC_LOCAL(uint32_t, u32Value);
7459 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7460 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7461 IEM_MC_ADVANCE_RIP();
7462 IEM_MC_END();
7463 return VINF_SUCCESS;
7464
7465 case IEMMODE_64BIT:
7466 IEM_MC_BEGIN(0, 1);
7467 IEM_MC_LOCAL(uint64_t, u64Value);
7468 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7469 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7470 IEM_MC_ADVANCE_RIP();
7471 IEM_MC_END();
7472 return VINF_SUCCESS;
7473
7474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7475 }
7476 }
7477 else
7478 {
7479 /*
7480 * We're loading a register from memory.
7481 */
7482 switch (pVCpu->iem.s.enmEffOpSize)
7483 {
7484 case IEMMODE_16BIT:
7485 IEM_MC_BEGIN(0, 2);
7486 IEM_MC_LOCAL(uint16_t, u16Value);
7487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7490 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7491 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7492 IEM_MC_ADVANCE_RIP();
7493 IEM_MC_END();
7494 return VINF_SUCCESS;
7495
7496 case IEMMODE_32BIT:
7497 IEM_MC_BEGIN(0, 2);
7498 IEM_MC_LOCAL(uint32_t, u32Value);
7499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7502 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7503 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7504 IEM_MC_ADVANCE_RIP();
7505 IEM_MC_END();
7506 return VINF_SUCCESS;
7507
7508 case IEMMODE_64BIT:
7509 IEM_MC_BEGIN(0, 2);
7510 IEM_MC_LOCAL(uint64_t, u64Value);
7511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7514 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7515 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7516 IEM_MC_ADVANCE_RIP();
7517 IEM_MC_END();
7518 return VINF_SUCCESS;
7519
7520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7521 }
7522 }
7523}
7524
7525
7526/** Opcode 0x0f 0xb7. */
7527FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7528{
7529 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7530 IEMOP_HLP_MIN_386();
7531
7532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7533
7534 /** @todo Not entirely sure how the operand size prefix is handled here,
7535 * assuming that it will be ignored. Would be nice to have a few
7536 * tests for this. */
7537 /*
7538 * If rm is denoting a register, no more instruction bytes.
7539 */
7540 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7541 {
7542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7543 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7544 {
7545 IEM_MC_BEGIN(0, 1);
7546 IEM_MC_LOCAL(uint32_t, u32Value);
7547 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7548 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7549 IEM_MC_ADVANCE_RIP();
7550 IEM_MC_END();
7551 }
7552 else
7553 {
7554 IEM_MC_BEGIN(0, 1);
7555 IEM_MC_LOCAL(uint64_t, u64Value);
7556 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7557 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7558 IEM_MC_ADVANCE_RIP();
7559 IEM_MC_END();
7560 }
7561 }
7562 else
7563 {
7564 /*
7565 * We're loading a register from memory.
7566 */
7567 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7568 {
7569 IEM_MC_BEGIN(0, 2);
7570 IEM_MC_LOCAL(uint32_t, u32Value);
7571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7574 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7575 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7576 IEM_MC_ADVANCE_RIP();
7577 IEM_MC_END();
7578 }
7579 else
7580 {
7581 IEM_MC_BEGIN(0, 2);
7582 IEM_MC_LOCAL(uint64_t, u64Value);
7583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7586 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7587 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7588 IEM_MC_ADVANCE_RIP();
7589 IEM_MC_END();
7590 }
7591 }
7592 return VINF_SUCCESS;
7593}
7594
7595
7596/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7597FNIEMOP_UD_STUB(iemOp_jmpe);
7598/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7599FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7600
7601
7602/**
7603 * @opcode 0xb9
7604 * @opinvalid intel-modrm
7605 * @optest ->
7606 */
7607FNIEMOP_DEF(iemOp_Grp10)
7608{
7609 /*
7610 * AMD does not decode beyond the 0xb9 byte, whereas Intel decodes the
7611 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7612 */
7613 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7614 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7615 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7616}
7617
7618
7619/** Opcode 0x0f 0xba. */
7620FNIEMOP_DEF(iemOp_Grp8)
7621{
7622 IEMOP_HLP_MIN_386();
7623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7624 PCIEMOPBINSIZES pImpl;
7625 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7626 {
7627 case 0: case 1: case 2: case 3:
7628 /* Both AMD and Intel want full modr/m decoding and imm8. */
7629 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7630 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7631 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7632 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7633 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7635 }
7636 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7637
7638 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7639 {
7640 /* register destination. */
7641 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7643
7644 switch (pVCpu->iem.s.enmEffOpSize)
7645 {
7646 case IEMMODE_16BIT:
7647 IEM_MC_BEGIN(3, 0);
7648 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7649 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7650 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7651
7652 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7653 IEM_MC_REF_EFLAGS(pEFlags);
7654 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7655
7656 IEM_MC_ADVANCE_RIP();
7657 IEM_MC_END();
7658 return VINF_SUCCESS;
7659
7660 case IEMMODE_32BIT:
7661 IEM_MC_BEGIN(3, 0);
7662 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7663 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7664 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7665
7666 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7667 IEM_MC_REF_EFLAGS(pEFlags);
7668 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7669
7670 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7671 IEM_MC_ADVANCE_RIP();
7672 IEM_MC_END();
7673 return VINF_SUCCESS;
7674
7675 case IEMMODE_64BIT:
7676 IEM_MC_BEGIN(3, 0);
7677 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7678 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7679 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7680
7681 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7682 IEM_MC_REF_EFLAGS(pEFlags);
7683 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7684
7685 IEM_MC_ADVANCE_RIP();
7686 IEM_MC_END();
7687 return VINF_SUCCESS;
7688
7689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7690 }
7691 }
7692 else
7693 {
7694 /* memory destination. */
7695
7696 uint32_t fAccess;
7697 if (pImpl->pfnLockedU16)
7698 fAccess = IEM_ACCESS_DATA_RW;
7699 else /* BT */
7700 fAccess = IEM_ACCESS_DATA_R;
7701
7702 /** @todo test negative bit offsets! */
7703 switch (pVCpu->iem.s.enmEffOpSize)
7704 {
7705 case IEMMODE_16BIT:
7706 IEM_MC_BEGIN(3, 1);
7707 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7708 IEM_MC_ARG(uint16_t, u16Src, 1);
7709 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7711
7712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7713 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7714 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7715 if (pImpl->pfnLockedU16)
7716 IEMOP_HLP_DONE_DECODING();
7717 else
7718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7719 IEM_MC_FETCH_EFLAGS(EFlags);
7720 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7721 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7722 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7723 else
7724 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7725 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7726
7727 IEM_MC_COMMIT_EFLAGS(EFlags);
7728 IEM_MC_ADVANCE_RIP();
7729 IEM_MC_END();
7730 return VINF_SUCCESS;
7731
7732 case IEMMODE_32BIT:
7733 IEM_MC_BEGIN(3, 1);
7734 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7735 IEM_MC_ARG(uint32_t, u32Src, 1);
7736 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7738
7739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7740 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7741 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7742 if (pImpl->pfnLockedU16)
7743 IEMOP_HLP_DONE_DECODING();
7744 else
7745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7746 IEM_MC_FETCH_EFLAGS(EFlags);
7747 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7748 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7749 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7750 else
7751 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7752 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7753
7754 IEM_MC_COMMIT_EFLAGS(EFlags);
7755 IEM_MC_ADVANCE_RIP();
7756 IEM_MC_END();
7757 return VINF_SUCCESS;
7758
7759 case IEMMODE_64BIT:
7760 IEM_MC_BEGIN(3, 1);
7761 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7762 IEM_MC_ARG(uint64_t, u64Src, 1);
7763 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7765
7766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7767 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7768 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7769 if (pImpl->pfnLockedU16)
7770 IEMOP_HLP_DONE_DECODING();
7771 else
7772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7773 IEM_MC_FETCH_EFLAGS(EFlags);
7774 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7775 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7776 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7777 else
7778 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7779 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7780
7781 IEM_MC_COMMIT_EFLAGS(EFlags);
7782 IEM_MC_ADVANCE_RIP();
7783 IEM_MC_END();
7784 return VINF_SUCCESS;
7785
7786 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7787 }
7788 }
7789}
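
/* A minimal sketch of the register-form BT semantics decoded above: the
   imm8 bit offset is masked to the operand width (0x0f/0x1f/0x3f) and CF
   receives the selected bit. Hypothetical helper name, illustration only. */
#if 0
static int iemDemoBtU32(uint32_t u32Dst, uint8_t u8Bit)
{
    return (u32Dst >> (u8Bit & 0x1f)) & 1; /* the new CF value */
}
#endif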
7790
7791
7792/** Opcode 0x0f 0xbb. */
7793FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7794{
7795 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7796 IEMOP_HLP_MIN_386();
7797 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7798}
7799
7800
7801/** Opcode 0x0f 0xbc. */
7802FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7803{
7804 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7805 IEMOP_HLP_MIN_386();
7806 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7807 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7808}
7809
7810
7811/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7812FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7813
7814
7815/** Opcode 0x0f 0xbd. */
7816FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7817{
7818 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7819 IEMOP_HLP_MIN_386();
7820 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7821 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7822}
7823
7824
7825/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7826FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7827
7828
7829/** Opcode 0x0f 0xbe. */
7830FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7831{
7832 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7833 IEMOP_HLP_MIN_386();
7834
7835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7836
7837 /*
7838 * If rm is denoting a register, no more instruction bytes.
7839 */
7840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7841 {
7842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7843 switch (pVCpu->iem.s.enmEffOpSize)
7844 {
7845 case IEMMODE_16BIT:
7846 IEM_MC_BEGIN(0, 1);
7847 IEM_MC_LOCAL(uint16_t, u16Value);
7848 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7849 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7850 IEM_MC_ADVANCE_RIP();
7851 IEM_MC_END();
7852 return VINF_SUCCESS;
7853
7854 case IEMMODE_32BIT:
7855 IEM_MC_BEGIN(0, 1);
7856 IEM_MC_LOCAL(uint32_t, u32Value);
7857 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7858 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7859 IEM_MC_ADVANCE_RIP();
7860 IEM_MC_END();
7861 return VINF_SUCCESS;
7862
7863 case IEMMODE_64BIT:
7864 IEM_MC_BEGIN(0, 1);
7865 IEM_MC_LOCAL(uint64_t, u64Value);
7866 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7867 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7868 IEM_MC_ADVANCE_RIP();
7869 IEM_MC_END();
7870 return VINF_SUCCESS;
7871
7872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7873 }
7874 }
7875 else
7876 {
7877 /*
7878 * We're loading a register from memory.
7879 */
7880 switch (pVCpu->iem.s.enmEffOpSize)
7881 {
7882 case IEMMODE_16BIT:
7883 IEM_MC_BEGIN(0, 2);
7884 IEM_MC_LOCAL(uint16_t, u16Value);
7885 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7888 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7889 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7890 IEM_MC_ADVANCE_RIP();
7891 IEM_MC_END();
7892 return VINF_SUCCESS;
7893
7894 case IEMMODE_32BIT:
7895 IEM_MC_BEGIN(0, 2);
7896 IEM_MC_LOCAL(uint32_t, u32Value);
7897 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7900 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7901 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7902 IEM_MC_ADVANCE_RIP();
7903 IEM_MC_END();
7904 return VINF_SUCCESS;
7905
7906 case IEMMODE_64BIT:
7907 IEM_MC_BEGIN(0, 2);
7908 IEM_MC_LOCAL(uint64_t, u64Value);
7909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7912 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7913 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7914 IEM_MC_ADVANCE_RIP();
7915 IEM_MC_END();
7916 return VINF_SUCCESS;
7917
7918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7919 }
7920 }
7921}
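
/* A minimal sketch of the sign extension performed by the fetch helpers
   above: the byte is widened with copies of its sign bit, e.g. 0x80
   becomes 0xffffff80. Hypothetical helper name, illustration only. */
#if 0
static uint32_t iemDemoSignExtendU8ToU32(uint8_t u8)
{
    return (uint32_t)(int32_t)(int8_t)u8; /* the cast chain replicates bit 7 */
}
#endif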
7922
7923
7924/** Opcode 0x0f 0xbf. */
7925FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7926{
7927 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7928 IEMOP_HLP_MIN_386();
7929
7930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7931
7932 /** @todo Not entirely sure how the operand size prefix is handled here,
7933 * assuming that it will be ignored. Would be nice to have a few
7934 * tests for this. */
7935 /*
7936 * If rm is denoting a register, no more instruction bytes.
7937 */
7938 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7939 {
7940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7941 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7942 {
7943 IEM_MC_BEGIN(0, 1);
7944 IEM_MC_LOCAL(uint32_t, u32Value);
7945 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7946 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7947 IEM_MC_ADVANCE_RIP();
7948 IEM_MC_END();
7949 }
7950 else
7951 {
7952 IEM_MC_BEGIN(0, 1);
7953 IEM_MC_LOCAL(uint64_t, u64Value);
7954 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7955 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7956 IEM_MC_ADVANCE_RIP();
7957 IEM_MC_END();
7958 }
7959 }
7960 else
7961 {
7962 /*
7963 * We're loading a register from memory.
7964 */
7965 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7966 {
7967 IEM_MC_BEGIN(0, 2);
7968 IEM_MC_LOCAL(uint32_t, u32Value);
7969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7972 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7973 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7974 IEM_MC_ADVANCE_RIP();
7975 IEM_MC_END();
7976 }
7977 else
7978 {
7979 IEM_MC_BEGIN(0, 2);
7980 IEM_MC_LOCAL(uint64_t, u64Value);
7981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7984 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7985 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7986 IEM_MC_ADVANCE_RIP();
7987 IEM_MC_END();
7988 }
7989 }
7990 return VINF_SUCCESS;
7991}
7992
7993
7994/** Opcode 0x0f 0xc0. */
7995FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7996{
7997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7998 IEMOP_HLP_MIN_486();
7999 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8000
8001 /*
8002 * If rm is denoting a register, no more instruction bytes.
8003 */
8004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8005 {
8006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8007
8008 IEM_MC_BEGIN(3, 0);
8009 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8010 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8011 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8012
8013 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8014 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8015 IEM_MC_REF_EFLAGS(pEFlags);
8016 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8017
8018 IEM_MC_ADVANCE_RIP();
8019 IEM_MC_END();
8020 }
8021 else
8022 {
8023 /*
8024 * We're accessing memory.
8025 */
8026 IEM_MC_BEGIN(3, 3);
8027 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8028 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8029 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8030 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8032
8033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8034 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8035 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8036 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8037 IEM_MC_FETCH_EFLAGS(EFlags);
8038 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8039 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8040 else
8041 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8042
8043 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8044 IEM_MC_COMMIT_EFLAGS(EFlags);
8045 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8046 IEM_MC_ADVANCE_RIP();
8047 IEM_MC_END();
8048 return VINF_SUCCESS;
8049 }
8050 return VINF_SUCCESS;
8051}
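
/* A minimal sketch of the XADD semantics above: the destination receives
   the sum while the source register receives the old destination value,
   with EFLAGS updated as for ADD. Hypothetical helper name, illustration
   only (EFLAGS handling omitted). */
#if 0
static void iemDemoXAddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8OldDst = *pu8Dst;
    *pu8Dst = (uint8_t)(*pu8Dst + *pu8Reg);    /* destination gets the sum */
    *pu8Reg = u8OldDst;                        /* source gets the old value */
}
#endif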
8052
8053
8054/** Opcode 0x0f 0xc1. */
8055FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8056{
8057 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8058 IEMOP_HLP_MIN_486();
8059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8060
8061 /*
8062 * If rm is denoting a register, no more instruction bytes.
8063 */
8064 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8065 {
8066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8067
8068 switch (pVCpu->iem.s.enmEffOpSize)
8069 {
8070 case IEMMODE_16BIT:
8071 IEM_MC_BEGIN(3, 0);
8072 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8073 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8074 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8075
8076 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8077 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8078 IEM_MC_REF_EFLAGS(pEFlags);
8079 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8080
8081 IEM_MC_ADVANCE_RIP();
8082 IEM_MC_END();
8083 return VINF_SUCCESS;
8084
8085 case IEMMODE_32BIT:
8086 IEM_MC_BEGIN(3, 0);
8087 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8088 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8089 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8090
8091 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8092 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8093 IEM_MC_REF_EFLAGS(pEFlags);
8094 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8095
8096 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8097 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8098 IEM_MC_ADVANCE_RIP();
8099 IEM_MC_END();
8100 return VINF_SUCCESS;
8101
8102 case IEMMODE_64BIT:
8103 IEM_MC_BEGIN(3, 0);
8104 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8105 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8106 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8107
8108 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8109 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8110 IEM_MC_REF_EFLAGS(pEFlags);
8111 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8112
8113 IEM_MC_ADVANCE_RIP();
8114 IEM_MC_END();
8115 return VINF_SUCCESS;
8116
8117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8118 }
8119 }
8120 else
8121 {
8122 /*
8123 * We're accessing memory.
8124 */
8125 switch (pVCpu->iem.s.enmEffOpSize)
8126 {
8127 case IEMMODE_16BIT:
8128 IEM_MC_BEGIN(3, 3);
8129 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8130 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8131 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8132 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8134
8135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8136 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8137 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8138 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8139 IEM_MC_FETCH_EFLAGS(EFlags);
8140 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8141 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8142 else
8143 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8144
8145 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8146 IEM_MC_COMMIT_EFLAGS(EFlags);
8147 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8148 IEM_MC_ADVANCE_RIP();
8149 IEM_MC_END();
8150 return VINF_SUCCESS;
8151
8152 case IEMMODE_32BIT:
8153 IEM_MC_BEGIN(3, 3);
8154 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8155 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8156 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8157 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8159
8160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8161 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8162 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8163 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8164 IEM_MC_FETCH_EFLAGS(EFlags);
8165 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8166 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8167 else
8168 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8169
8170 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8171 IEM_MC_COMMIT_EFLAGS(EFlags);
8172 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8173 IEM_MC_ADVANCE_RIP();
8174 IEM_MC_END();
8175 return VINF_SUCCESS;
8176
8177 case IEMMODE_64BIT:
8178 IEM_MC_BEGIN(3, 3);
8179 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8180 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8181 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8182 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8184
8185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8186 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8187 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8188 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8189 IEM_MC_FETCH_EFLAGS(EFlags);
8190 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8191 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8192 else
8193 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8194
8195 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8196 IEM_MC_COMMIT_EFLAGS(EFlags);
8197 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8198 IEM_MC_ADVANCE_RIP();
8199 IEM_MC_END();
8200 return VINF_SUCCESS;
8201
8202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8203 }
8204 }
8205}
8206
8207
8208/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8209FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8210/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8211FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8212/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8213FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8214/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8215FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8216
8217
8218/** Opcode 0x0f 0xc3. */
8219FNIEMOP_DEF(iemOp_movnti_My_Gy)
8220{
8221 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8222
8223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8224
8225 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8226 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8227 {
8228 switch (pVCpu->iem.s.enmEffOpSize)
8229 {
8230 case IEMMODE_32BIT:
8231 IEM_MC_BEGIN(0, 2);
8232 IEM_MC_LOCAL(uint32_t, u32Value);
8233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8234
8235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8237 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8238 return IEMOP_RAISE_INVALID_OPCODE();
8239
8240 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8241 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8242 IEM_MC_ADVANCE_RIP();
8243 IEM_MC_END();
8244 break;
8245
8246 case IEMMODE_64BIT:
8247 IEM_MC_BEGIN(0, 2);
8248 IEM_MC_LOCAL(uint64_t, u64Value);
8249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8250
8251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8253 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8254 return IEMOP_RAISE_INVALID_OPCODE();
8255
8256 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8257 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8258 IEM_MC_ADVANCE_RIP();
8259 IEM_MC_END();
8260 break;
8261
8262 case IEMMODE_16BIT:
8263 /** @todo check this form. */
8264 return IEMOP_RAISE_INVALID_OPCODE();
8265 }
8266 }
8267 else
8268 return IEMOP_RAISE_INVALID_OPCODE();
8269 return VINF_SUCCESS;
8270}
8271/* Opcode 0x66 0x0f 0xc3 - invalid */
8272/* Opcode 0xf3 0x0f 0xc3 - invalid */
8273/* Opcode 0xf2 0x0f 0xc3 - invalid */
8274
8275/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8276FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8277/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8278FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8279/* Opcode 0xf3 0x0f 0xc4 - invalid */
8280/* Opcode 0xf2 0x0f 0xc4 - invalid */
8281
8282/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8283FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8284/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8285FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8286/* Opcode 0xf3 0x0f 0xc5 - invalid */
8287/* Opcode 0xf2 0x0f 0xc5 - invalid */
8288
8289/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8290FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8291/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8292FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8293/* Opcode 0xf3 0x0f 0xc6 - invalid */
8294/* Opcode 0xf2 0x0f 0xc6 - invalid */
8295
8296
8297/** Opcode 0x0f 0xc7 !11/1. */
8298FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8299{
8300 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8301
8302 IEM_MC_BEGIN(4, 3);
8303 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8304 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8305 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8306 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8307 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8308 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8310
8311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8312 IEMOP_HLP_DONE_DECODING();
8313 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8314
8315 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8316 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8317 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8318
8319 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8320 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8321 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8322
8323 IEM_MC_FETCH_EFLAGS(EFlags);
8324 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8325 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8326 else
8327 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8328
8329 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8330 IEM_MC_COMMIT_EFLAGS(EFlags);
8331 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8332 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8333 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8334 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8335 IEM_MC_ENDIF();
8336 IEM_MC_ADVANCE_RIP();
8337
8338 IEM_MC_END();
8339 return VINF_SUCCESS;
8340}
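
/* A minimal sketch of the CMPXCHG8B semantics above: EDX:EAX is compared
   with the 64-bit memory operand; on match ECX:EBX is stored and ZF set,
   otherwise EDX:EAX is loaded from memory and ZF cleared. Hypothetical
   helper name, illustration only. */
#if 0
static int iemDemoCmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx;   /* equal: store ECX:EBX */
        return 1;               /* ZF=1 */
    }
    *pu64EaxEdx = *pu64Mem;     /* not equal: load EDX:EAX */
    return 0;                   /* ZF=0 */
}
#endif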
8341
8342
8343/** Opcode REX.W 0x0f 0xc7 !11/1. */
8344FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8345{
8346 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8347 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8348 {
8349#if 0
8350 RT_NOREF(bRm);
8351 IEMOP_BITCH_ABOUT_STUB();
8352 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8353#else
8354 IEM_MC_BEGIN(4, 3);
8355 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8356 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8357 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8358 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8359 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8360 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8362
8363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8364 IEMOP_HLP_DONE_DECODING();
8365 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8366 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8367
8368 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8369 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8370 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8371
8372 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8373 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8374 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8375
8376 IEM_MC_FETCH_EFLAGS(EFlags);
8377# ifdef RT_ARCH_AMD64
8378 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8379 {
8380 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8381 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8382 else
8383 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8384 }
8385 else
8386# endif
8387 {
8388 /* Note! The fallback for 32-bit systems and systems without CX16 does
8389 multiple accesses that are not at all atomic, which works fine in a
8390 uni-CPU guest configuration (ignoring DMA). If guest SMP is active
8391 we have no choice but to use a rendezvous callback here. Sigh. */
8392 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8393 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8394 else
8395 {
8396 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8397 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8398 }
8399 }
8400
8401 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8402 IEM_MC_COMMIT_EFLAGS(EFlags);
8403 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8404 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8405 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8406 IEM_MC_ENDIF();
8407 IEM_MC_ADVANCE_RIP();
8408
8409 IEM_MC_END();
8410 return VINF_SUCCESS;
8411#endif
8412 }
8413 Log(("cmpxchg16b -> #UD\n"));
8414 return IEMOP_RAISE_INVALID_OPCODE();
8415}
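
/* The cmpxchg16b emulation above picks one of three paths: a native
   (optionally locked) helper when the host CPU has CX16, a plain
   non-atomic fallback for uni-CPU guests, and an EMT rendezvous for SMP
   guests so no other vCPU can observe the non-atomic update in progress. */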
8416
8417FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8418{
8419 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8420 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8421 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8422}
8423
8424/** Opcode 0x0f 0xc7 11/6. */
8425FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8426
8427/** Opcode 0x0f 0xc7 !11/6. */
8428FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8429
8430/** Opcode 0x66 0x0f 0xc7 !11/6. */
8431FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8432
8433/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8434FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8435
8436/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8437FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8438
8439/** Opcode 0x0f 0xc7 11/7. */
8440FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8441
8442
8443/**
8444 * Group 9 jump table for register variant.
8445 */
8446IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8447{ /* pfx: none, 066h, 0f3h, 0f2h */
8448 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8449 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8450 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8451 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8452 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8453 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8454 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8455 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8456};
8457AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8458
8459
8460/**
8461 * Group 9 jump table for memory variant.
8462 */
8463IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8464{ /* pfx: none, 066h, 0f3h, 0f2h */
8465 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8466 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8467 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8468 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8469 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8470 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8471 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8472 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8473};
8474AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8475
8476
8477/** Opcode 0x0f 0xc7. */
8478FNIEMOP_DEF(iemOp_Grp9)
8479{
8480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8481 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8482 /* register, register */
8483 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8484 + pVCpu->iem.s.idxPrefix], bRm);
8485 /* memory, register */
8486 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8487 + pVCpu->iem.s.idxPrefix], bRm);
8488}
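
/* The two tables above are indexed as (/reg * 4) + the last-prefix index
   (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2), matching the dispatch in
   iemOp_Grp9; e.g. /1 on a memory operand always lands on
   iemOp_Grp9_cmpxchg8bOr16b, whatever the prefix. */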
8489
8490
8491/**
8492 * Common 'bswap register' helper.
8493 */
8494FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8495{
8496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8497 switch (pVCpu->iem.s.enmEffOpSize)
8498 {
8499 case IEMMODE_16BIT:
8500 IEM_MC_BEGIN(1, 0);
8501 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8502 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8503 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8504 IEM_MC_ADVANCE_RIP();
8505 IEM_MC_END();
8506 return VINF_SUCCESS;
8507
8508 case IEMMODE_32BIT:
8509 IEM_MC_BEGIN(1, 0);
8510 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8511 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8512 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8513 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8514 IEM_MC_ADVANCE_RIP();
8515 IEM_MC_END();
8516 return VINF_SUCCESS;
8517
8518 case IEMMODE_64BIT:
8519 IEM_MC_BEGIN(1, 0);
8520 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8521 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8522 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8523 IEM_MC_ADVANCE_RIP();
8524 IEM_MC_END();
8525 return VINF_SUCCESS;
8526
8527 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8528 }
8529}
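
/* A minimal sketch of the 32-bit BSWAP above: the byte order is reversed,
   e.g. 0x12345678 -> 0x78563412. (BSWAP with a 16-bit operand is
   documented as undefined, hence the u32 reference trick in the 16-bit
   case above.) Hypothetical helper name, illustration only. */
#if 0
static uint32_t iemDemoBSwapU32(uint32_t u32)
{
    return ((u32 & UINT32_C(0x000000ff)) << 24)
         | ((u32 & UINT32_C(0x0000ff00)) <<  8)
         | ((u32 & UINT32_C(0x00ff0000)) >>  8)
         | ((u32 & UINT32_C(0xff000000)) >> 24);
}
#endif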
8530
8531
8532/** Opcode 0x0f 0xc8. */
8533FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8534{
8535 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8536 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
8537 REX.X prefix. It appears REX.B is the correct prefix. For a
8538 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8539 IEMOP_HLP_MIN_486();
8540 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8541}
8542
8543
8544/** Opcode 0x0f 0xc9. */
8545FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8546{
8547 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8548 IEMOP_HLP_MIN_486();
8549 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8550}
8551
8552
8553/** Opcode 0x0f 0xca. */
8554FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8555{
8556 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8557 IEMOP_HLP_MIN_486();
8558 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8559}
8560
8561
8562/** Opcode 0x0f 0xcb. */
8563FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8564{
8565 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8566 IEMOP_HLP_MIN_486();
8567 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8568}
8569
8570
8571/** Opcode 0x0f 0xcc. */
8572FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8573{
8574 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8575 IEMOP_HLP_MIN_486();
8576 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8577}
8578
8579
8580/** Opcode 0x0f 0xcd. */
8581FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8582{
8583 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8584 IEMOP_HLP_MIN_486();
8585 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8586}
8587
8588
8589/** Opcode 0x0f 0xce. */
8590FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8591{
8592 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8593 IEMOP_HLP_MIN_486();
8594 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8595}
8596
8597
8598/** Opcode 0x0f 0xcf. */
8599FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8600{
8601 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8602 IEMOP_HLP_MIN_486();
8603 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8604}
8605
8606
8607/* Opcode 0x0f 0xd0 - invalid */
8608/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8609FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8610/* Opcode 0xf3 0x0f 0xd0 - invalid */
8611/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8612FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8613
8614/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8615FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8616/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8617FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8618/* Opcode 0xf3 0x0f 0xd1 - invalid */
8619/* Opcode 0xf2 0x0f 0xd1 - invalid */
8620
8621/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8622FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8623/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8624FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8625/* Opcode 0xf3 0x0f 0xd2 - invalid */
8626/* Opcode 0xf2 0x0f 0xd2 - invalid */
8627
8628/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8629FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8630/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8631FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8632/* Opcode 0xf3 0x0f 0xd3 - invalid */
8633/* Opcode 0xf2 0x0f 0xd3 - invalid */
8634
8635/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8636FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8637/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8638FNIEMOP_STUB(iemOp_paddq_Vx_W);
8639/* Opcode 0xf3 0x0f 0xd4 - invalid */
8640/* Opcode 0xf2 0x0f 0xd4 - invalid */
8641
8642/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8643FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8644/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8645FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8646/* Opcode 0xf3 0x0f 0xd5 - invalid */
8647/* Opcode 0xf2 0x0f 0xd5 - invalid */
8648
8649/* Opcode 0x0f 0xd6 - invalid */
8650
8651/**
8652 * @opcode 0xd6
8653 * @oppfx 0x66
8654 * @opcpuid sse2
8655 * @opgroup og_sse2_pcksclr_datamove
8656 * @opxcpttype none
8657 * @optest op1=-1 op2=2 -> op1=2
8658 * @optest op1=0 op2=-42 -> op1=-42
8659 */
8660FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8661{
8662 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8664 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8665 {
8666 /*
8667 * Register, register.
8668 */
8669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8670 IEM_MC_BEGIN(0, 2);
8671 IEM_MC_LOCAL(uint64_t, uSrc);
8672
8673 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8674 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8675
8676 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8677 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8678
8679 IEM_MC_ADVANCE_RIP();
8680 IEM_MC_END();
8681 }
8682 else
8683 {
8684 /*
8685 * Memory, register.
8686 */
8687 IEM_MC_BEGIN(0, 2);
8688 IEM_MC_LOCAL(uint64_t, uSrc);
8689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8690
8691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8693 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8694 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8695
8696 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8697 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8698
8699 IEM_MC_ADVANCE_RIP();
8700 IEM_MC_END();
8701 }
8702 return VINF_SUCCESS;
8703}
8704
8705
8706/**
8707 * @opcode 0xd6
8708 * @opcodesub 11 mr/reg
8709 * @oppfx f3
8710 * @opcpuid sse2
8711 * @opgroup og_sse2_simdint_datamove
8712 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8713 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8714 */
8715FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8716{
8717 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8718 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8719 {
8720 /*
8721 * Register, register.
8722 */
8723 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8725 IEM_MC_BEGIN(0, 1);
8726 IEM_MC_LOCAL(uint64_t, uSrc);
8727
8728 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8730
8731 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8732 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8733 IEM_MC_FPU_TO_MMX_MODE();
8734
8735 IEM_MC_ADVANCE_RIP();
8736 IEM_MC_END();
8737 return VINF_SUCCESS;
8738 }
8739
8740 /**
8741 * @opdone
8742 * @opmnemonic udf30fd6mem
8743 * @opcode 0xd6
8744 * @opcodesub !11 mr/reg
8745 * @oppfx f3
8746 * @opunused intel-modrm
8747 * @opcpuid sse
8748 * @optest ->
8749 */
8750 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8751}


/**
 * @opcode      0xd6
 * @opcodesub   11 mr/reg
 * @oppfx       f2
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @optest      op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 * @optest      op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
 * @optest      op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
 * @optest      op1=-42 op2=0xfedcba9876543210
 *              -> op1=0xfedcba9876543210 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  udf20fd6mem
     * @opcode      0xd6
     * @opcodesub   !11 mr/reg
     * @oppfx       f2
     * @opunused    intel-modrm
     * @opcpuid     sse
     * @optest      ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
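
/*
 * Note! Both movq2dq and movdq2q above leave the x87 unit in MMX mode via
 *       IEM_MC_FPU_TO_MMX_MODE, which is why the @optest lines expect ftw=0xff.
 *       Roughly sketched, assuming a PX86FXSTATE style context (illustrative
 *       only, not the exact IEM_MC implementation):
 *
 *           DECLINLINE(void) iemExampleFpuToMmxMode(PX86FXSTATE pFpuCtx)
 *           {
 *               pFpuCtx->FTW  = 0xff;              // tag all eight MMX registers as valid
 *               pFpuCtx->FSW &= ~X86_FSW_TOP_MASK; // reset the x87 stack top to ST(0)
 *           }
 */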

/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  The REX.W prefix only becomes necessary when
     *        VLMAX > 256 and opcode modifications are made to work with the
     *        whole width (not just 128). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
    /* Docs say register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
    {
        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  The REX.W prefix only becomes necessary when
     *        VLMAX > 256 and opcode modifications are made to work with the
     *        whole width (not just 128). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
    /* Docs say register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
    {
        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
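
/*
 * Note! Both pmovmskb forms above gather the most significant bit of each
 *       packed byte into the low bits of the destination GPR and zero the
 *       rest.  The actual iemAImpl_pmovmskb_u64/u128 workers are implemented
 *       elsewhere; a portable sketch of the 64-bit one looks like this
 *       (illustrative only):
 *
 *           static void iemExamplePMovMskBU64(uint64_t *puDst, uint64_t const *puSrc)
 *           {
 *               uint64_t const uSrc  = *puSrc;
 *               uint64_t       fMask = 0;
 *               for (unsigned iByte = 0; iByte < 8; iByte++)
 *                   fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
 *               *puDst = fMask; // bits 8 thru 63 end up zero
 *           }
 *
 *       The 128-bit SSE2 variant does the same for 16 bytes, yielding a
 *       16-bit mask.
 */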

/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */


/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_STUB(iemOp_psubusb_Vx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_STUB(iemOp_pand_Vx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_STUB(iemOp_psraw_Vx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);


/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
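
/*
 * Note! The non-temporal hint carried by movntq/movntdq is invisible to an
 *       interpreter, so both are emulated above as plain stores.  What does
 *       differ architecturally is alignment: the 16-byte movntdq store goes
 *       through IEM_MC_STORE_MEM_U128_ALIGN_SSE, which faults on a misaligned
 *       effective address, roughly along these lines (sketch, not the exact
 *       code path):
 *
 *           if (GCPtrEffSrc & 15)                 // 16-byte alignment required
 *               return iemRaiseGeneralProtectionFault0(pVCpu);
 *
 *       The 8-byte movntq store has no such alignment requirement.
 */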

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
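
/*
 * Note! Both pxor encodings funnel into the same full-width binary-operation
 *       workers; only the operand width differs.  The scalar core is just a
 *       wide XOR, along these lines (illustrative only, not the actual
 *       g_iemAImpl_pxor entry points):
 *
 *           static void iemExamplePXorU64(uint64_t *puDst, uint64_t const *puSrc)
 *           {
 *               *puDst ^= *puSrc; // MMX: one 64-bit lane
 *           }
 *
 *       The SSE2 form applies the same operation to both 64-bit halves of the
 *       XMM register.
 */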

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
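
/*
 * Note! On Intel CPUs ud0 consumes a ModR/M byte (plus any SIB/displacement
 *       bytes it implies) before raising #UD, which is why the code above
 *       decodes the effective address and only then fails.  In sketch form:
 *
 *           fetch 0x0f 0xff;
 *           if (Intel)
 *               fetch ModR/M + SIB + displacement;   // advances the decoder
 *           raise #UD;                               // in all cases
 *
 *       AMD CPUs raise #UD without consuming the ModR/M byte, which the
 *       vendor check above models.
 */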



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*          no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
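
/*
 * Note! Each opcode row above carries four entries, selected by the last
 *       mandatory prefix seen (none, 0x66, 0xf3, 0xf2).  The decoder is
 *       assumed to dispatch along these lines (sketch; the prefix index is
 *       kept in pVCpu->iem.s.idxPrefix elsewhere in IEM):
 *
 *           return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
 *
 *       That is what the AssertCompile above guards: 256 opcodes times four
 *       columns gives 1024 entries.
 */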

/** @} */
