1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 72517 2018-06-11 14:52:54Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
38 }
39
40 /* Ignore operand size here, memory refs are always 16-bit. */
41 IEM_MC_BEGIN(2, 0);
42 IEM_MC_ARG(uint16_t, iEffSeg, 0);
43 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
44 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
45 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
46 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
47 IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
48 IEM_MC_END();
49 return VINF_SUCCESS;
50}
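
/*
 * Illustrative sketch (hypothetical helper, not part of this file): why the
 * memory form above ignores the operand size.  Architecturally, SLDT/STR/SMSW
 * to memory always store just a 16-bit value regardless of 0x66/REX.W
 * prefixes; only the register form honours the effective operand size.
 */
#if 0 /* sketch only */
static void exampleSldtToMem(uint8_t *pbDst, uint16_t uLdtrSel)
{
    /* Always a 16-bit store, whatever the effective operand size is. */
    pbDst[0] = (uint8_t)(uLdtrSel & 0xff);
    pbDst[1] = (uint8_t)(uLdtrSel >> 8);
}
#endif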
51
52
53/** Opcode 0x0f 0x00 /1. */
54FNIEMOPRM_DEF(iemOp_Grp6_str)
55{
56 IEMOP_MNEMONIC(str, "str Rv/Mw");
57 IEMOP_HLP_MIN_286();
58 IEMOP_HLP_NO_REAL_OR_V86_MODE();
59
60
61 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
62 {
63 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
64 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
65 }
66
67 /* Ignore operand size here, memory refs are always 16-bit. */
68 IEM_MC_BEGIN(2, 0);
69 IEM_MC_ARG(uint16_t, iEffSeg, 0);
70 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
71 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
72 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
73 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
74 IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
75 IEM_MC_END();
76 return VINF_SUCCESS;
77}
78
79
80/** Opcode 0x0f 0x00 /2. */
81FNIEMOPRM_DEF(iemOp_Grp6_lldt)
82{
83 IEMOP_MNEMONIC(lldt, "lldt Ew");
84 IEMOP_HLP_MIN_286();
85 IEMOP_HLP_NO_REAL_OR_V86_MODE();
86
87 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
88 {
89 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
90 IEM_MC_BEGIN(1, 0);
91 IEM_MC_ARG(uint16_t, u16Sel, 0);
92 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
93 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
94 IEM_MC_END();
95 }
96 else
97 {
98 IEM_MC_BEGIN(1, 1);
99 IEM_MC_ARG(uint16_t, u16Sel, 0);
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
102 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
103 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
104 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
105 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
106 IEM_MC_END();
107 }
108 return VINF_SUCCESS;
109}
110
111
112/** Opcode 0x0f 0x00 /3. */
113FNIEMOPRM_DEF(iemOp_Grp6_ltr)
114{
115 IEMOP_MNEMONIC(ltr, "ltr Ew");
116 IEMOP_HLP_MIN_286();
117 IEMOP_HLP_NO_REAL_OR_V86_MODE();
118
119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
120 {
121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
122 IEM_MC_BEGIN(1, 0);
123 IEM_MC_ARG(uint16_t, u16Sel, 0);
124 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
125 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
126 IEM_MC_END();
127 }
128 else
129 {
130 IEM_MC_BEGIN(1, 1);
131 IEM_MC_ARG(uint16_t, u16Sel, 0);
132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
135 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
136 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
137 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
138 IEM_MC_END();
139 }
140 return VINF_SUCCESS;
141}
142
143
144/** Common worker for Group 6 verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
145FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
146{
147 IEMOP_HLP_MIN_286();
148 IEMOP_HLP_NO_REAL_OR_V86_MODE();
149
150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
151 {
152 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
153 IEM_MC_BEGIN(2, 0);
154 IEM_MC_ARG(uint16_t, u16Sel, 0);
155 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
156 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
157 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
158 IEM_MC_END();
159 }
160 else
161 {
162 IEM_MC_BEGIN(2, 1);
163 IEM_MC_ARG(uint16_t, u16Sel, 0);
164 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
167 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
168 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
169 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
170 IEM_MC_END();
171 }
172 return VINF_SUCCESS;
173}
174
175
176/** Opcode 0x0f 0x00 /4. */
177FNIEMOPRM_DEF(iemOp_Grp6_verr)
178{
179 IEMOP_MNEMONIC(verr, "verr Ew");
180 IEMOP_HLP_MIN_286();
181 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
182}
183
184
185/** Opcode 0x0f 0x00 /5. */
186FNIEMOPRM_DEF(iemOp_Grp6_verw)
187{
188 IEMOP_MNEMONIC(verw, "verw Ew");
189 IEMOP_HLP_MIN_286();
190 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
191}
192
193
194/**
195 * Group 6 jump table.
196 */
197IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
198{
199 iemOp_Grp6_sldt,
200 iemOp_Grp6_str,
201 iemOp_Grp6_lldt,
202 iemOp_Grp6_ltr,
203 iemOp_Grp6_verr,
204 iemOp_Grp6_verw,
205 iemOp_InvalidWithRM,
206 iemOp_InvalidWithRM
207};
208
209/** Opcode 0x0f 0x00. */
210FNIEMOP_DEF(iemOp_Grp6)
211{
212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
213 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
214}
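
/*
 * Illustrative sketch (hypothetical helper, not used by the decoder): how the
 * dispatcher above indexes g_apfnGroup6.  A ModRM byte has the layout
 * mod[7:6] reg[5:3] rm[2:0], so e.g. 0x0f 0x00 with bRm=0xd8 (mod=3, reg=3,
 * rm=0) selects iemOp_Grp6_ltr.
 */
#if 0 /* sketch only */
DECLINLINE(unsigned) exampleGrp6Index(uint8_t bRm)
{
    /* reg field: 0=sldt, 1=str, 2=lldt, 3=ltr, 4=verr, 5=verw, 6/7=invalid. */
    return (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
}
#endif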
215
216
217/** Opcode 0x0f 0x01 /0. */
218FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
219{
220 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_64BIT_OP_SIZE();
223 IEM_MC_BEGIN(2, 1);
224 IEM_MC_ARG(uint8_t, iEffSeg, 0);
225 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
228 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
229 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
230 IEM_MC_END();
231 return VINF_SUCCESS;
232}
233
234
235/** Opcode 0x0f 0x01 /0. */
236FNIEMOP_DEF(iemOp_Grp7_vmcall)
237{
238 IEMOP_MNEMONIC(vmcall, "vmcall");
239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
240
241 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
242 want all hypercalls regardless of the instruction used; if a
243 hypercall isn't handled by GIM or HMSvm, an #UD is raised.
244 (NEM/win makes ASSUMPTIONS about this behavior.) */
245 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
246}
247
248
249/** Opcode 0x0f 0x01 /0. */
250FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
251{
252 IEMOP_BITCH_ABOUT_STUB();
253 return IEMOP_RAISE_INVALID_OPCODE();
254}
255
256
257/** Opcode 0x0f 0x01 /0. */
258FNIEMOP_DEF(iemOp_Grp7_vmresume)
259{
260 IEMOP_BITCH_ABOUT_STUB();
261 return IEMOP_RAISE_INVALID_OPCODE();
262}
263
264
265/** Opcode 0x0f 0x01 /0. */
266FNIEMOP_DEF(iemOp_Grp7_vmxoff)
267{
268 IEMOP_BITCH_ABOUT_STUB();
269 return IEMOP_RAISE_INVALID_OPCODE();
270}
271
272
273/** Opcode 0x0f 0x01 /1. */
274FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
275{
276 IEMOP_MNEMONIC(sidt, "sidt Ms");
277 IEMOP_HLP_MIN_286();
278 IEMOP_HLP_64BIT_OP_SIZE();
279 IEM_MC_BEGIN(2, 1);
280 IEM_MC_ARG(uint8_t, iEffSeg, 0);
281 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
284 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
285 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
286 IEM_MC_END();
287 return VINF_SUCCESS;
288}
289
290
291/** Opcode 0x0f 0x01 /1. */
292FNIEMOP_DEF(iemOp_Grp7_monitor)
293{
294 IEMOP_MNEMONIC(monitor, "monitor");
295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
296 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
297}
298
299
300/** Opcode 0x0f 0x01 /1. */
301FNIEMOP_DEF(iemOp_Grp7_mwait)
302{
303 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
305 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
306}
307
308
309/** Opcode 0x0f 0x01 /2. */
310FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
311{
312 IEMOP_MNEMONIC(lgdt, "lgdt");
313 IEMOP_HLP_64BIT_OP_SIZE();
314 IEM_MC_BEGIN(3, 1);
315 IEM_MC_ARG(uint8_t, iEffSeg, 0);
316 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
317 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
320 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
321 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
322 IEM_MC_END();
323 return VINF_SUCCESS;
324}
325
326
327/** Opcode 0x0f 0x01 0xd0. */
328FNIEMOP_DEF(iemOp_Grp7_xgetbv)
329{
330 IEMOP_MNEMONIC(xgetbv, "xgetbv");
331 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
332 {
333 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
334 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
335 }
336 return IEMOP_RAISE_INVALID_OPCODE();
337}
338
339
340/** Opcode 0x0f 0x01 0xd1. */
341FNIEMOP_DEF(iemOp_Grp7_xsetbv)
342{
343 IEMOP_MNEMONIC(xsetbv, "xsetbv");
344 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
345 {
346 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
347 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
348 }
349 return IEMOP_RAISE_INVALID_OPCODE();
350}
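
/*
 * Illustrative sketch of what the deferred iemCImpl_xgetbv worker has to
 * produce architecturally: EDX:EAX receives the 64-bit extended control
 * register selected by ECX (XCR0 being the only defined one).  Hypothetical
 * helper, not the real C implementation.
 */
#if 0 /* sketch only */
static void exampleXGetBv(uint64_t uXcr0, uint32_t *pEax, uint32_t *pEdx)
{
    *pEax = (uint32_t)uXcr0;         /* low half  */
    *pEdx = (uint32_t)(uXcr0 >> 32); /* high half */
}
#endif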
351
352
353/** Opcode 0x0f 0x01 /3. */
354FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
355{
356 IEMOP_MNEMONIC(lidt, "lidt");
357 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
358 ? IEMMODE_64BIT
359 : pVCpu->iem.s.enmEffOpSize;
360 IEM_MC_BEGIN(3, 1);
361 IEM_MC_ARG(uint8_t, iEffSeg, 0);
362 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
363 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
366 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
367 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
368 IEM_MC_END();
369 return VINF_SUCCESS;
370}
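
/*
 * Illustrative sketch (hypothetical structure name): the Ms operand that
 * sgdt/sidt/lgdt/lidt above read or write is a pseudo-descriptor, a 16-bit
 * limit followed by a 32-bit (legacy) or 64-bit (long mode) linear base --
 * which is why lidt forces the 64-bit operand size in 64-bit mode.
 */
#if 0 /* sketch only */
#pragma pack(1)
typedef struct EXAMPLEPSEUDODESC64
{
    uint16_t cbLimit; /* bytes 0..1: table limit */
    uint64_t uBase;   /* bytes 2..9: linear base (only 32 bits in legacy mode) */
} EXAMPLEPSEUDODESC64;
#pragma pack()
#endif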
371
372
373/** Opcode 0x0f 0x01 0xd8. */
374#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
375FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
376{
377 IEMOP_MNEMONIC(vmrun, "vmrun");
378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
379 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
380}
381#else
382FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
383#endif
384
385/** Opcode 0x0f 0x01 0xd9. */
386FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
387{
388 IEMOP_MNEMONIC(vmmcall, "vmmcall");
389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
390
391 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
392 want all hypercalls regardless of the instruction used; if a
393 hypercall isn't handled by GIM or HMSvm, an #UD is raised.
394 (NEM/win makes ASSUMPTIONS about this behavior.) */
395 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
396}
397
398/** Opcode 0x0f 0x01 0xda. */
399#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
400FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
401{
402 IEMOP_MNEMONIC(vmload, "vmload");
403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
404 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
405}
406#else
407FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
408#endif
409
410
411/** Opcode 0x0f 0x01 0xdb. */
412#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
413FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
414{
415 IEMOP_MNEMONIC(vmsave, "vmsave");
416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
417 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
418}
419#else
420FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
421#endif
422
423
424/** Opcode 0x0f 0x01 0xdc. */
425#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
426FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
427{
428 IEMOP_MNEMONIC(stgi, "stgi");
429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
430 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
431}
432#else
433FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
434#endif
435
436
437/** Opcode 0x0f 0x01 0xdd. */
438#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
439FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
440{
441 IEMOP_MNEMONIC(clgi, "clgi");
442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
443 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
444}
445#else
446FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
447#endif
448
449
450/** Opcode 0x0f 0x01 0xdf. */
451#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
452FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
453{
454 IEMOP_MNEMONIC(invlpga, "invlpga");
455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
456 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
457}
458#else
459FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
460#endif
461
462
463/** Opcode 0x0f 0x01 0xde. */
464#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
465FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
466{
467 IEMOP_MNEMONIC(skinit, "skinit");
468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
469 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
470}
471#else
472FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
473#endif
474
475
476/** Opcode 0x0f 0x01 /4. */
477FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
478{
479 IEMOP_MNEMONIC(smsw, "smsw");
480 IEMOP_HLP_MIN_286();
481 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
482 {
483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
484 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
485 }
486
487 /* Ignore operand size here, memory refs are always 16-bit. */
488 IEM_MC_BEGIN(2, 0);
489 IEM_MC_ARG(uint16_t, iEffSeg, 0);
490 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
493 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
494 IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
495 IEM_MC_END();
496 return VINF_SUCCESS;
497}
498
499
500/** Opcode 0x0f 0x01 /6. */
501FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
502{
503 /* The operand size is effectively ignored, all is 16-bit and only the
504 lower four bits (PE, MP, EM and TS) are used. */
505 IEMOP_MNEMONIC(lmsw, "lmsw");
506 IEMOP_HLP_MIN_286();
507 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
508 {
509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
510 IEM_MC_BEGIN(1, 0);
511 IEM_MC_ARG(uint16_t, u16Tmp, 0);
512 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
513 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
514 IEM_MC_END();
515 }
516 else
517 {
518 IEM_MC_BEGIN(1, 1);
519 IEM_MC_ARG(uint16_t, u16Tmp, 0);
520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
523 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
524 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
525 IEM_MC_END();
526 }
527 return VINF_SUCCESS;
528}
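
/*
 * Illustrative sketch of the architectural LMSW semantics handled by
 * iemCImpl_lmsw: only CR0.PE/MP/EM/TS are updated, and PE cannot be cleared
 * once set (LMSW cannot be used to leave protected mode).  Hypothetical
 * helper, not the real worker.
 */
#if 0 /* sketch only */
static uint64_t exampleLmsw(uint64_t uCr0, uint16_t u16NewMsw)
{
    uint64_t const fMask   = X86_CR0_PE | X86_CR0_MP | X86_CR0_EM | X86_CR0_TS;
    uint64_t       uNewCr0 = (uCr0 & ~fMask) | (u16NewMsw & fMask);
    if (uCr0 & X86_CR0_PE)
        uNewCr0 |= X86_CR0_PE; /* PE is sticky for LMSW. */
    return uNewCr0;
}
#endif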
529
530
531/** Opcode 0x0f 0x01 /7. */
532FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
533{
534 IEMOP_MNEMONIC(invlpg, "invlpg");
535 IEMOP_HLP_MIN_486();
536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
537 IEM_MC_BEGIN(1, 1);
538 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
540 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
541 IEM_MC_END();
542 return VINF_SUCCESS;
543}
544
545
546/** Opcode 0x0f 0x01 /7. */
547FNIEMOP_DEF(iemOp_Grp7_swapgs)
548{
549 IEMOP_MNEMONIC(swapgs, "swapgs");
550 IEMOP_HLP_ONLY_64BIT();
551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
552 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
553}
554
555
556/** Opcode 0x0f 0x01 /7. */
557FNIEMOP_DEF(iemOp_Grp7_rdtscp)
558{
559 IEMOP_MNEMONIC(rdtscp, "rdtscp");
560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
561 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
562}
563
564
565/**
566 * Group 7 jump table, memory variant.
567 */
568IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
569{
570 iemOp_Grp7_sgdt,
571 iemOp_Grp7_sidt,
572 iemOp_Grp7_lgdt,
573 iemOp_Grp7_lidt,
574 iemOp_Grp7_smsw,
575 iemOp_InvalidWithRM,
576 iemOp_Grp7_lmsw,
577 iemOp_Grp7_invlpg
578};
579
580
581/** Opcode 0x0f 0x01. */
582FNIEMOP_DEF(iemOp_Grp7)
583{
584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
585 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
586 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
587
588 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
589 {
590 case 0:
591 switch (bRm & X86_MODRM_RM_MASK)
592 {
593 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
594 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
595 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
596 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
597 }
598 return IEMOP_RAISE_INVALID_OPCODE();
599
600 case 1:
601 switch (bRm & X86_MODRM_RM_MASK)
602 {
603 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
604 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
605 }
606 return IEMOP_RAISE_INVALID_OPCODE();
607
608 case 2:
609 switch (bRm & X86_MODRM_RM_MASK)
610 {
611 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
612 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
613 }
614 return IEMOP_RAISE_INVALID_OPCODE();
615
616 case 3:
617 switch (bRm & X86_MODRM_RM_MASK)
618 {
619 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
620 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
621 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
622 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
623 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
624 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
625 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
626 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
627 IEM_NOT_REACHED_DEFAULT_CASE_RET();
628 }
629
630 case 4:
631 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
632
633 case 5:
634 return IEMOP_RAISE_INVALID_OPCODE();
635
636 case 6:
637 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
638
639 case 7:
640 switch (bRm & X86_MODRM_RM_MASK)
641 {
642 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
643 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
644 }
645 return IEMOP_RAISE_INVALID_OPCODE();
646
647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
648 }
649}
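
/*
 * Illustrative sketch: decoding 0x0f 0x01 0xd9 (vmmcall) with the dispatcher
 * above.  bRm=0xd9 has mod=3, so the memory table is skipped; reg=3 selects
 * the AMD SVM row and rm=1 picks iemOp_Grp7_Amd_vmmcall.  Values only, no
 * real decoder state involved; the helper is hypothetical.
 */
#if 0 /* sketch only */
static void exampleGrp7Decode(void)
{
    uint8_t  const bRm  = 0xd9;
    unsigned const iMod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;  /* = 3 */
    unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* = 3 */
    unsigned const iRm  = bRm & X86_MODRM_RM_MASK;                            /* = 1 */
    NOREF(iMod); NOREF(iReg); NOREF(iRm);
}
#endif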
650
651/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03), Gv,Ew. */
652FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
653{
654 IEMOP_HLP_NO_REAL_OR_V86_MODE();
655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
656
657 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
658 {
659 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
660 switch (pVCpu->iem.s.enmEffOpSize)
661 {
662 case IEMMODE_16BIT:
663 {
664 IEM_MC_BEGIN(3, 0);
665 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
666 IEM_MC_ARG(uint16_t, u16Sel, 1);
667 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
668
669 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
670 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
671 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
672
673 IEM_MC_END();
674 return VINF_SUCCESS;
675 }
676
677 case IEMMODE_32BIT:
678 case IEMMODE_64BIT:
679 {
680 IEM_MC_BEGIN(3, 0);
681 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
682 IEM_MC_ARG(uint16_t, u16Sel, 1);
683 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
684
685 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
686 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
687 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
688
689 IEM_MC_END();
690 return VINF_SUCCESS;
691 }
692
693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
694 }
695 }
696 else
697 {
698 switch (pVCpu->iem.s.enmEffOpSize)
699 {
700 case IEMMODE_16BIT:
701 {
702 IEM_MC_BEGIN(3, 1);
703 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
704 IEM_MC_ARG(uint16_t, u16Sel, 1);
705 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
707
708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
709 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
710
711 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
712 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
713 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
714
715 IEM_MC_END();
716 return VINF_SUCCESS;
717 }
718
719 case IEMMODE_32BIT:
720 case IEMMODE_64BIT:
721 {
722 IEM_MC_BEGIN(3, 1);
723 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
724 IEM_MC_ARG(uint16_t, u16Sel, 1);
725 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
727
728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
729 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
730/** @todo testcase: make sure it's a 16-bit read. */
731
732 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
733 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
734 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
735
736 IEM_MC_END();
737 return VINF_SUCCESS;
738 }
739
740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
741 }
742 }
743}
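
/*
 * Illustrative sketch of the split the fIsLar flag selects in the
 * iemCImpl_LarLsl_uXX workers: LAR returns the descriptor's access-rights
 * bytes, LSL the (granularity-expanded) segment limit.  Only the limit
 * expansion is shown; the helper name is hypothetical.
 */
#if 0 /* sketch only */
static uint32_t exampleExpandLimit(uint32_t uRawLimit20, bool fGran4K)
{
    /* A page-granular limit covers whole pages, so the low 12 bits read as ones. */
    return fGran4K ? (uRawLimit20 << 12) | UINT32_C(0xfff) : uRawLimit20;
}
#endif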
744
745
746
747/** Opcode 0x0f 0x02. */
748FNIEMOP_DEF(iemOp_lar_Gv_Ew)
749{
750 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
751 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
752}
753
754
755/** Opcode 0x0f 0x03. */
756FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
757{
758 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
759 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
760}
761
762
763/** Opcode 0x0f 0x05. */
764FNIEMOP_DEF(iemOp_syscall)
765{
766 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
768 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
769}
770
771
772/** Opcode 0x0f 0x06. */
773FNIEMOP_DEF(iemOp_clts)
774{
775 IEMOP_MNEMONIC(clts, "clts");
776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
777 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
778}
779
780
781/** Opcode 0x0f 0x07. */
782FNIEMOP_DEF(iemOp_sysret)
783{
784 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
786 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
787}
788
789
790/** Opcode 0x0f 0x08. */
791FNIEMOP_DEF(iemOp_invd)
792{
793 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
794 IEMOP_HLP_MIN_486();
795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
796 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
797}
798
799
800/** Opcode 0x0f 0x09. */
801FNIEMOP_DEF(iemOp_wbinvd)
802{
803 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
804 IEMOP_HLP_MIN_486();
805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
806 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
807}
808
809
810/** Opcode 0x0f 0x0b. */
811FNIEMOP_DEF(iemOp_ud2)
812{
813 IEMOP_MNEMONIC(ud2, "ud2");
814 return IEMOP_RAISE_INVALID_OPCODE();
815}
816
817/** Opcode 0x0f 0x0d. */
818FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
819{
820 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
821 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
822 {
823 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
824 return IEMOP_RAISE_INVALID_OPCODE();
825 }
826
827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
828 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
829 {
830 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
831 return IEMOP_RAISE_INVALID_OPCODE();
832 }
833
834 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
835 {
836 case 2: /* Aliased to /0 for the time being. */
837 case 4: /* Aliased to /0 for the time being. */
838 case 5: /* Aliased to /0 for the time being. */
839 case 6: /* Aliased to /0 for the time being. */
840 case 7: /* Aliased to /0 for the time being. */
841 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
842 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
843 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
845 }
846
847 IEM_MC_BEGIN(0, 1);
848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
851 /* Currently a NOP. */
852 NOREF(GCPtrEffSrc);
853 IEM_MC_ADVANCE_RIP();
854 IEM_MC_END();
855 return VINF_SUCCESS;
856}
857
858
859/** Opcode 0x0f 0x0e. */
860FNIEMOP_DEF(iemOp_femms)
861{
862 IEMOP_MNEMONIC(femms, "femms");
863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
864
865 IEM_MC_BEGIN(0,0);
866 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
867 IEM_MC_MAYBE_RAISE_FPU_XCPT();
868 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
869 IEM_MC_FPU_FROM_MMX_MODE();
870 IEM_MC_ADVANCE_RIP();
871 IEM_MC_END();
872 return VINF_SUCCESS;
873}
874
875
876/** Opcode 0x0f 0x0f. */
877FNIEMOP_DEF(iemOp_3Dnow)
878{
879 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
880 {
881 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
882 return IEMOP_RAISE_INVALID_OPCODE();
883 }
884
885#ifdef IEM_WITH_3DNOW
886 /* This is pretty sparse, use switch instead of table. */
887 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
888 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
889#else
890 IEMOP_BITCH_ABOUT_STUB();
891 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
892#endif
893}
894
895
896/**
897 * @opcode 0x10
898 * @oppfx none
899 * @opcpuid sse
900 * @opgroup og_sse_simdfp_datamove
901 * @opxcpttype 4UA
902 * @optest op1=1 op2=2 -> op1=2
903 * @optest op1=0 op2=-22 -> op1=-22
904 */
905FNIEMOP_DEF(iemOp_movups_Vps_Wps)
906{
907 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
909 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
910 {
911 /*
912 * Register, register.
913 */
914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
915 IEM_MC_BEGIN(0, 0);
916 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
917 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
918 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
919 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
920 IEM_MC_ADVANCE_RIP();
921 IEM_MC_END();
922 }
923 else
924 {
925 /*
926 * Memory, register.
927 */
928 IEM_MC_BEGIN(0, 2);
929 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
931
932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
934 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
935 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
936
937 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
938 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
939
940 IEM_MC_ADVANCE_RIP();
941 IEM_MC_END();
942 }
943 return VINF_SUCCESS;
944
945}
946
947
948/**
949 * @opcode 0x10
950 * @oppfx 0x66
951 * @opcpuid sse2
952 * @opgroup og_sse2_pcksclr_datamove
953 * @opxcpttype 4UA
954 * @optest op1=1 op2=2 -> op1=2
955 * @optest op1=0 op2=-42 -> op1=-42
956 */
957FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
958{
959 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
962 {
963 /*
964 * Register, register.
965 */
966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
967 IEM_MC_BEGIN(0, 0);
968 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
969 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
970 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
971 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
972 IEM_MC_ADVANCE_RIP();
973 IEM_MC_END();
974 }
975 else
976 {
977 /*
978 * Memory, register.
979 */
980 IEM_MC_BEGIN(0, 2);
981 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
983
984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
986 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
987 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
988
989 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
990 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
991
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 }
995 return VINF_SUCCESS;
996}
997
998
999/**
1000 * @opcode 0x10
1001 * @oppfx 0xf3
1002 * @opcpuid sse
1003 * @opgroup og_sse_simdfp_datamove
1004 * @opxcpttype 5
1005 * @optest op1=1 op2=2 -> op1=2
1006 * @optest op1=0 op2=-22 -> op1=-22
1007 */
1008FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1009{
1010 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1012 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1013 {
1014 /*
1015 * Register, register.
1016 */
1017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1018 IEM_MC_BEGIN(0, 1);
1019 IEM_MC_LOCAL(uint32_t, uSrc);
1020
1021 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1022 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1023 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1024 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1025
1026 IEM_MC_ADVANCE_RIP();
1027 IEM_MC_END();
1028 }
1029 else
1030 {
1031 /*
1032 * Memory, register.
1033 */
1034 IEM_MC_BEGIN(0, 2);
1035 IEM_MC_LOCAL(uint32_t, uSrc);
1036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1037
1038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1040 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1041 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1042
1043 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1044 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1045
1046 IEM_MC_ADVANCE_RIP();
1047 IEM_MC_END();
1048 }
1049 return VINF_SUCCESS;
1050}
1051
1052
1053/**
1054 * @opcode 0x10
1055 * @oppfx 0xf2
1056 * @opcpuid sse2
1057 * @opgroup og_sse2_pcksclr_datamove
1058 * @opxcpttype 5
1059 * @optest op1=1 op2=2 -> op1=2
1060 * @optest op1=0 op2=-42 -> op1=-42
1061 */
1062FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1063{
1064 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1067 {
1068 /*
1069 * Register, register.
1070 */
1071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1072 IEM_MC_BEGIN(0, 1);
1073 IEM_MC_LOCAL(uint64_t, uSrc);
1074
1075 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1076 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1077 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1078 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1079
1080 IEM_MC_ADVANCE_RIP();
1081 IEM_MC_END();
1082 }
1083 else
1084 {
1085 /*
1086 * Memory, register.
1087 */
1088 IEM_MC_BEGIN(0, 2);
1089 IEM_MC_LOCAL(uint64_t, uSrc);
1090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1091
1092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1094 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1095 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1096
1097 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1098 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1099
1100 IEM_MC_ADVANCE_RIP();
1101 IEM_MC_END();
1102 }
1103 return VINF_SUCCESS;
1104}
1105
1106
1107/**
1108 * @opcode 0x11
1109 * @oppfx none
1110 * @opcpuid sse
1111 * @opgroup og_sse_simdfp_datamove
1112 * @opxcpttype 4UA
1113 * @optest op1=1 op2=2 -> op1=2
1114 * @optest op1=0 op2=-42 -> op1=-42
1115 */
1116FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1117{
1118 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1120 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1121 {
1122 /*
1123 * Register, register.
1124 */
1125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1126 IEM_MC_BEGIN(0, 0);
1127 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1128 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1129 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1130 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1131 IEM_MC_ADVANCE_RIP();
1132 IEM_MC_END();
1133 }
1134 else
1135 {
1136 /*
1137 * Memory, register.
1138 */
1139 IEM_MC_BEGIN(0, 2);
1140 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1142
1143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1145 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1146 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1147
1148 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1149 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1150
1151 IEM_MC_ADVANCE_RIP();
1152 IEM_MC_END();
1153 }
1154 return VINF_SUCCESS;
1155}
1156
1157
1158/**
1159 * @opcode 0x11
1160 * @oppfx 0x66
1161 * @opcpuid sse2
1162 * @opgroup og_sse2_pcksclr_datamove
1163 * @opxcpttype 4UA
1164 * @optest op1=1 op2=2 -> op1=2
1165 * @optest op1=0 op2=-42 -> op1=-42
1166 */
1167FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1168{
1169 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1171 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1172 {
1173 /*
1174 * Register, register.
1175 */
1176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1177 IEM_MC_BEGIN(0, 0);
1178 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1179 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1180 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1181 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1182 IEM_MC_ADVANCE_RIP();
1183 IEM_MC_END();
1184 }
1185 else
1186 {
1187 /*
1188 * Memory, register.
1189 */
1190 IEM_MC_BEGIN(0, 2);
1191 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1193
1194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1196 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1197 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1198
1199 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1200 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1201
1202 IEM_MC_ADVANCE_RIP();
1203 IEM_MC_END();
1204 }
1205 return VINF_SUCCESS;
1206}
1207
1208
1209/**
1210 * @opcode 0x11
1211 * @oppfx 0xf3
1212 * @opcpuid sse
1213 * @opgroup og_sse_simdfp_datamove
1214 * @opxcpttype 5
1215 * @optest op1=1 op2=2 -> op1=2
1216 * @optest op1=0 op2=-22 -> op1=-22
1217 */
1218FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1219{
1220 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1222 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1223 {
1224 /*
1225 * Register, register.
1226 */
1227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1228 IEM_MC_BEGIN(0, 1);
1229 IEM_MC_LOCAL(uint32_t, uSrc);
1230
1231 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1232 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1233 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1234 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1235
1236 IEM_MC_ADVANCE_RIP();
1237 IEM_MC_END();
1238 }
1239 else
1240 {
1241 /*
1242 * Memory, register.
1243 */
1244 IEM_MC_BEGIN(0, 2);
1245 IEM_MC_LOCAL(uint32_t, uSrc);
1246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1247
1248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1250 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1251 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1252
1253 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1254 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1255
1256 IEM_MC_ADVANCE_RIP();
1257 IEM_MC_END();
1258 }
1259 return VINF_SUCCESS;
1260}
1261
1262
1263/**
1264 * @opcode 0x11
1265 * @oppfx 0xf2
1266 * @opcpuid sse2
1267 * @opgroup og_sse2_pcksclr_datamove
1268 * @opxcpttype 5
1269 * @optest op1=1 op2=2 -> op1=2
1270 * @optest op1=0 op2=-42 -> op1=-42
1271 */
1272FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1273{
1274 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1275 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1276 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1277 {
1278 /*
1279 * Register, register.
1280 */
1281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1282 IEM_MC_BEGIN(0, 1);
1283 IEM_MC_LOCAL(uint64_t, uSrc);
1284
1285 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1286 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1287 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1288 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1289
1290 IEM_MC_ADVANCE_RIP();
1291 IEM_MC_END();
1292 }
1293 else
1294 {
1295 /*
1296 * Memory, register.
1297 */
1298 IEM_MC_BEGIN(0, 2);
1299 IEM_MC_LOCAL(uint64_t, uSrc);
1300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1301
1302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1304 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1305 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1306
1307 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1308 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1309
1310 IEM_MC_ADVANCE_RIP();
1311 IEM_MC_END();
1312 }
1313 return VINF_SUCCESS;
1314}
1315
1316
1317FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1318{
1319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1320 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1321 {
1322 /**
1323 * @opcode 0x12
1324 * @opcodesub 11 mr/reg
1325 * @oppfx none
1326 * @opcpuid sse
1327 * @opgroup og_sse_simdfp_datamove
1328 * @opxcpttype 5
1329 * @optest op1=1 op2=2 -> op1=2
1330 * @optest op1=0 op2=-42 -> op1=-42
1331 */
1332 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1333
1334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1335 IEM_MC_BEGIN(0, 1);
1336 IEM_MC_LOCAL(uint64_t, uSrc);
1337
1338 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1339 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1340 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1341 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1342
1343 IEM_MC_ADVANCE_RIP();
1344 IEM_MC_END();
1345 }
1346 else
1347 {
1348 /**
1349 * @opdone
1350 * @opcode 0x12
1351 * @opcodesub !11 mr/reg
1352 * @oppfx none
1353 * @opcpuid sse
1354 * @opgroup og_sse_simdfp_datamove
1355 * @opxcpttype 5
1356 * @optest op1=1 op2=2 -> op1=2
1357 * @optest op1=0 op2=-42 -> op1=-42
1358 * @opfunction iemOp_movlps_Vq_Mq__movhlps
1359 */
1360 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1361
1362 IEM_MC_BEGIN(0, 2);
1363 IEM_MC_LOCAL(uint64_t, uSrc);
1364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1365
1366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1368 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1369 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1370
1371 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1372 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1373
1374 IEM_MC_ADVANCE_RIP();
1375 IEM_MC_END();
1376 }
1377 return VINF_SUCCESS;
1378}
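
/*
 * Illustrative sketch of the register form above: MOVHLPS copies the high
 * qword of the source XMM register into the low qword of the destination,
 * leaving the destination's high qword untouched.  Hypothetical helper
 * operating on plain RTUINT128U values.
 */
#if 0 /* sketch only */
static void exampleMovHlps(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    puDst->au64[0] = puSrc->au64[1]; /* dst.lo = src.hi; dst.hi unchanged */
}
#endif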
1379
1380
1381/**
1382 * @opcode 0x12
1383 * @opcodesub !11 mr/reg
1384 * @oppfx 0x66
1385 * @opcpuid sse2
1386 * @opgroup og_sse2_pcksclr_datamove
1387 * @opxcpttype 5
1388 * @optest op1=1 op2=2 -> op1=2
1389 * @optest op1=0 op2=-42 -> op1=-42
1390 */
1391FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1392{
1393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1394 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1395 {
1396 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1397
1398 IEM_MC_BEGIN(0, 2);
1399 IEM_MC_LOCAL(uint64_t, uSrc);
1400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1401
1402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1404 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1405 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1406
1407 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1408 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1409
1410 IEM_MC_ADVANCE_RIP();
1411 IEM_MC_END();
1412 return VINF_SUCCESS;
1413 }
1414
1415 /**
1416 * @opdone
1417 * @opmnemonic ud660f12m3
1418 * @opcode 0x12
1419 * @opcodesub 11 mr/reg
1420 * @oppfx 0x66
1421 * @opunused immediate
1422 * @opcpuid sse
1423 * @optest ->
1424 */
1425 return IEMOP_RAISE_INVALID_OPCODE();
1426}
1427
1428
1429/**
1430 * @opcode 0x12
1431 * @oppfx 0xf3
1432 * @opcpuid sse3
1433 * @opgroup og_sse3_pcksclr_datamove
1434 * @opxcpttype 4
1435 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1436 * op1=0x00000002000000020000000100000001
1437 */
1438FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1439{
1440 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1442 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1443 {
1444 /*
1445 * Register, register.
1446 */
1447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1448 IEM_MC_BEGIN(2, 0);
1449 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1450 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1451
1452 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1453 IEM_MC_PREPARE_SSE_USAGE();
1454
1455 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1456 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1457 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1458
1459 IEM_MC_ADVANCE_RIP();
1460 IEM_MC_END();
1461 }
1462 else
1463 {
1464 /*
1465 * Register, memory.
1466 */
1467 IEM_MC_BEGIN(2, 2);
1468 IEM_MC_LOCAL(RTUINT128U, uSrc);
1469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1470 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1471 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1472
1473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1475 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1476 IEM_MC_PREPARE_SSE_USAGE();
1477
1478 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1479 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1480 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1481
1482 IEM_MC_ADVANCE_RIP();
1483 IEM_MC_END();
1484 }
1485 return VINF_SUCCESS;
1486}
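
/*
 * Illustrative sketch of what iemAImpl_movsldup computes (it matches the
 * @optest above): the even-indexed dwords of the source are duplicated into
 * each dword pair of the destination.  Hypothetical helper.
 */
#if 0 /* sketch only */
static void exampleMovSlDup(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    puDst->au32[0] = puDst->au32[1] = puSrc->au32[0];
    puDst->au32[2] = puDst->au32[3] = puSrc->au32[2];
}
#endif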
1487
1488
1489/**
1490 * @opcode 0x12
1491 * @oppfx 0xf2
1492 * @opcpuid sse3
1493 * @opgroup og_sse3_pcksclr_datamove
1494 * @opxcpttype 5
1495 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1496 * op1=0x22222222111111112222222211111111
1497 */
1498FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1499{
1500 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1502 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1503 {
1504 /*
1505 * Register, register.
1506 */
1507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1508 IEM_MC_BEGIN(2, 0);
1509 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1510 IEM_MC_ARG(uint64_t, uSrc, 1);
1511
1512 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1513 IEM_MC_PREPARE_SSE_USAGE();
1514
1515 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1516 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1517 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1518
1519 IEM_MC_ADVANCE_RIP();
1520 IEM_MC_END();
1521 }
1522 else
1523 {
1524 /*
1525 * Register, memory.
1526 */
1527 IEM_MC_BEGIN(2, 2);
1528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1529 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1530 IEM_MC_ARG(uint64_t, uSrc, 1);
1531
1532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1534 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1535 IEM_MC_PREPARE_SSE_USAGE();
1536
1537 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1538 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1539 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1540
1541 IEM_MC_ADVANCE_RIP();
1542 IEM_MC_END();
1543 }
1544 return VINF_SUCCESS;
1545}
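
/*
 * Illustrative sketch of iemAImpl_movddup's result: the 64-bit source is
 * duplicated into both qword lanes of the destination.  Hypothetical helper.
 */
#if 0 /* sketch only */
static void exampleMovDDup(RTUINT128U *puDst, uint64_t uSrc)
{
    puDst->au64[0] = uSrc;
    puDst->au64[1] = uSrc;
}
#endif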
1546
1547
1548/**
1549 * @opcode 0x13
1550 * @opcodesub !11 mr/reg
1551 * @oppfx none
1552 * @opcpuid sse
1553 * @opgroup og_sse_simdfp_datamove
1554 * @opxcpttype 5
1555 * @optest op1=1 op2=2 -> op1=2
1556 * @optest op1=0 op2=-42 -> op1=-42
1557 */
1558FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1559{
1560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1561 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1562 {
1563 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1564
1565 IEM_MC_BEGIN(0, 2);
1566 IEM_MC_LOCAL(uint64_t, uSrc);
1567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1568
1569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1571 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1572 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1573
1574 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1575 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1576
1577 IEM_MC_ADVANCE_RIP();
1578 IEM_MC_END();
1579 return VINF_SUCCESS;
1580 }
1581
1582 /**
1583 * @opdone
1584 * @opmnemonic ud0f13m3
1585 * @opcode 0x13
1586 * @opcodesub 11 mr/reg
1587 * @oppfx none
1588 * @opunused immediate
1589 * @opcpuid sse
1590 * @optest ->
1591 */
1592 return IEMOP_RAISE_INVALID_OPCODE();
1593}
1594
1595
1596/**
1597 * @opcode 0x13
1598 * @opcodesub !11 mr/reg
1599 * @oppfx 0x66
1600 * @opcpuid sse2
1601 * @opgroup og_sse2_pcksclr_datamove
1602 * @opxcpttype 5
1603 * @optest op1=1 op2=2 -> op1=2
1604 * @optest op1=0 op2=-42 -> op1=-42
1605 */
1606FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1607{
1608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1609 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1610 {
1611 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1612 IEM_MC_BEGIN(0, 2);
1613 IEM_MC_LOCAL(uint64_t, uSrc);
1614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1615
1616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1618 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1619 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1620
1621 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1622 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1623
1624 IEM_MC_ADVANCE_RIP();
1625 IEM_MC_END();
1626 return VINF_SUCCESS;
1627 }
1628
1629 /**
1630 * @opdone
1631 * @opmnemonic ud660f13m3
1632 * @opcode 0x13
1633 * @opcodesub 11 mr/reg
1634 * @oppfx 0x66
1635 * @opunused immediate
1636 * @opcpuid sse
1637 * @optest ->
1638 */
1639 return IEMOP_RAISE_INVALID_OPCODE();
1640}
1641
1642
1643/**
1644 * @opmnemonic udf30f13
1645 * @opcode 0x13
1646 * @oppfx 0xf3
1647 * @opunused intel-modrm
1648 * @opcpuid sse
1649 * @optest ->
1650 * @opdone
1651 */
1652
1653/**
1654 * @opmnemonic udf20f13
1655 * @opcode 0x13
1656 * @oppfx 0xf2
1657 * @opunused intel-modrm
1658 * @opcpuid sse
1659 * @optest ->
1660 * @opdone
1661 */
1662
1663/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
1664FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1665/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1666FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1667
1668/**
1669 * @opdone
1670 * @opmnemonic udf30f14
1671 * @opcode 0x14
1672 * @oppfx 0xf3
1673 * @opunused intel-modrm
1674 * @opcpuid sse
1675 * @optest ->
1676 * @opdone
1677 */
1678
1679/**
1680 * @opmnemonic udf20f14
1681 * @opcode 0x14
1682 * @oppfx 0xf2
1683 * @opunused intel-modrm
1684 * @opcpuid sse
1685 * @optest ->
1686 * @opdone
1687 */
1688
1689/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1690FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1691/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1692FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1693/* Opcode 0xf3 0x0f 0x15 - invalid */
1694/* Opcode 0xf2 0x0f 0x15 - invalid */
1695
1696/**
1697 * @opdone
1698 * @opmnemonic udf30f15
1699 * @opcode 0x15
1700 * @oppfx 0xf3
1701 * @opunused intel-modrm
1702 * @opcpuid sse
1703 * @optest ->
1704 * @opdone
1705 */
1706
1707/**
1708 * @opmnemonic udf20f15
1709 * @opcode 0x15
1710 * @oppfx 0xf2
1711 * @opunused intel-modrm
1712 * @opcpuid sse
1713 * @optest ->
1714 * @opdone
1715 */
1716
1717FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1718{
1719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1721 {
1722 /**
1723 * @opcode 0x16
1724 * @opcodesub 11 mr/reg
1725 * @oppfx none
1726 * @opcpuid sse
1727 * @opgroup og_sse_simdfp_datamove
1728 * @opxcpttype 5
1729 * @optest op1=1 op2=2 -> op1=2
1730 * @optest op1=0 op2=-42 -> op1=-42
1731 */
1732 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1733
1734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1735 IEM_MC_BEGIN(0, 1);
1736 IEM_MC_LOCAL(uint64_t, uSrc);
1737
1738 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1739 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1740 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1741 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1742
1743 IEM_MC_ADVANCE_RIP();
1744 IEM_MC_END();
1745 }
1746 else
1747 {
1748 /**
1749 * @opdone
1750 * @opcode 0x16
1751 * @opcodesub !11 mr/reg
1752 * @oppfx none
1753 * @opcpuid sse
1754 * @opgroup og_sse_simdfp_datamove
1755 * @opxcpttype 5
1756 * @optest op1=1 op2=2 -> op1=2
1757 * @optest op1=0 op2=-42 -> op1=-42
1758 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1759 */
1760 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1761
1762 IEM_MC_BEGIN(0, 2);
1763 IEM_MC_LOCAL(uint64_t, uSrc);
1764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1765
1766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1768 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1769 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1770
1771 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1772 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1773
1774 IEM_MC_ADVANCE_RIP();
1775 IEM_MC_END();
1776 }
1777 return VINF_SUCCESS;
1778}
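
/*
 * Illustrative sketch of the register form above: MOVLHPS copies the low
 * qword of the source into the high qword of the destination -- the mirror
 * of MOVHLPS at 0x0f 0x12.  Hypothetical helper.
 */
#if 0 /* sketch only */
static void exampleMovLhps(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    puDst->au64[1] = puSrc->au64[0]; /* dst.hi = src.lo; dst.lo unchanged */
}
#endif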
1779
1780
1781/**
1782 * @opcode 0x16
1783 * @opcodesub !11 mr/reg
1784 * @oppfx 0x66
1785 * @opcpuid sse2
1786 * @opgroup og_sse2_pcksclr_datamove
1787 * @opxcpttype 5
1788 * @optest op1=1 op2=2 -> op1=2
1789 * @optest op1=0 op2=-42 -> op1=-42
1790 */
1791FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1792{
1793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1794 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1795 {
1796 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1797 IEM_MC_BEGIN(0, 2);
1798 IEM_MC_LOCAL(uint64_t, uSrc);
1799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1800
1801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1803 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1804 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1805
1806 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1807 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1808
1809 IEM_MC_ADVANCE_RIP();
1810 IEM_MC_END();
1811 return VINF_SUCCESS;
1812 }
1813
1814 /**
1815 * @opdone
1816 * @opmnemonic ud660f16m3
1817 * @opcode 0x16
1818 * @opcodesub 11 mr/reg
1819 * @oppfx 0x66
1820 * @opunused immediate
1821 * @opcpuid sse
1822 * @optest ->
1823 */
1824 return IEMOP_RAISE_INVALID_OPCODE();
1825}
1826
1827
1828/**
1829 * @opcode 0x16
1830 * @oppfx 0xf3
1831 * @opcpuid sse3
1832 * @opgroup og_sse3_pcksclr_datamove
1833 * @opxcpttype 4
1834 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1835 * op1=0x00000002000000020000000100000001
1836 */
1837FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1838{
1839 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1840 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1841 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1842 {
1843 /*
1844 * Register, register.
1845 */
1846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1847 IEM_MC_BEGIN(2, 0);
1848 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1849 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1850
1851 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1852 IEM_MC_PREPARE_SSE_USAGE();
1853
1854 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1855 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1856 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1857
1858 IEM_MC_ADVANCE_RIP();
1859 IEM_MC_END();
1860 }
1861 else
1862 {
1863 /*
1864 * Register, memory.
1865 */
1866 IEM_MC_BEGIN(2, 2);
1867 IEM_MC_LOCAL(RTUINT128U, uSrc);
1868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1869 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1870 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1871
1872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1874 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1875 IEM_MC_PREPARE_SSE_USAGE();
1876
1877 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1878 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1879 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1880
1881 IEM_MC_ADVANCE_RIP();
1882 IEM_MC_END();
1883 }
1884 return VINF_SUCCESS;
1885}
1886
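/*
 * A minimal plain-C sketch of what movshdup computes, matching the @optest
 * values above; iemRefMovShDup is a made-up name and this block is not used
 * by the emulator. Each odd-indexed source dword is duplicated into the
 * corresponding even/odd pair of the result.
 */
#if 0 /* illustrative reference model; assumes <stdint.h> when extracted */
static void iemRefMovShDup(uint32_t au32Dst[4], uint32_t const au32Src[4])
{
    au32Dst[0] = au32Src[1];   /* low pair takes source dword 1 */
    au32Dst[1] = au32Src[1];
    au32Dst[2] = au32Src[3];   /* high pair takes source dword 3 */
    au32Dst[3] = au32Src[3];
}
#endif
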
1887/**
1888 * @opdone
1889 * @opmnemonic udf20f16
1890 * @opcode 0x16
1891 * @oppfx 0xf2
1892 * @opunused intel-modrm
1893 * @opcpuid sse
1894 * @optest ->
1895 * @opdone
1896 */
1897
1898
1899/**
1900 * @opcode 0x17
1901 * @opcodesub !11 mr/reg
1902 * @oppfx none
1903 * @opcpuid sse
1904 * @opgroup og_sse_simdfp_datamove
1905 * @opxcpttype 5
1906 * @optest op1=1 op2=2 -> op1=2
1907 * @optest op1=0 op2=-42 -> op1=-42
1908 */
1909FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1910{
1911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1912 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1913 {
1914 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1915
1916 IEM_MC_BEGIN(0, 2);
1917 IEM_MC_LOCAL(uint64_t, uSrc);
1918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1919
1920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1922 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1923 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1924
1925 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1926 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1927
1928 IEM_MC_ADVANCE_RIP();
1929 IEM_MC_END();
1930 return VINF_SUCCESS;
1931 }
1932
1933 /**
1934 * @opdone
1935 * @opmnemonic ud0f17m3
1936 * @opcode 0x17
1937 * @opcodesub 11 mr/reg
1938 * @oppfx none
1939 * @opunused immediate
1940 * @opcpuid sse
1941 * @optest ->
1942 */
1943 return IEMOP_RAISE_INVALID_OPCODE();
1944}
1945
1946
1947/**
1948 * @opcode 0x17
1949 * @opcodesub !11 mr/reg
1950 * @oppfx 0x66
1951 * @opcpuid sse2
1952 * @opgroup og_sse2_pcksclr_datamove
1953 * @opxcpttype 5
1954 * @optest op1=1 op2=2 -> op1=2
1955 * @optest op1=0 op2=-42 -> op1=-42
1956 */
1957FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1958{
1959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1960 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1961 {
1962 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1963
1964 IEM_MC_BEGIN(0, 2);
1965 IEM_MC_LOCAL(uint64_t, uSrc);
1966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1967
1968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1971 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1972
1973 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1974 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1975
1976 IEM_MC_ADVANCE_RIP();
1977 IEM_MC_END();
1978 return VINF_SUCCESS;
1979 }
1980
1981 /**
1982 * @opdone
1983 * @opmnemonic ud660f17m3
1984 * @opcode 0x17
1985 * @opcodesub 11 mr/reg
1986 * @oppfx 0x66
1987 * @opunused immediate
1988 * @opcpuid sse
1989 * @optest ->
1990 */
1991 return IEMOP_RAISE_INVALID_OPCODE();
1992}
1993
1994
1995/**
1996 * @opdone
1997 * @opmnemonic udf30f17
1998 * @opcode 0x17
1999 * @oppfx 0xf3
2000 * @opunused intel-modrm
2001 * @opcpuid sse
2002 * @optest ->
2003 * @opdone
2004 */
2005
2006/**
2007 * @opmnemonic udf20f17
2008 * @opcode 0x17
2009 * @oppfx 0xf2
2010 * @opunused intel-modrm
2011 * @opcpuid sse
2012 * @optest ->
2013 * @opdone
2014 */
2015
2016
2017/** Opcode 0x0f 0x18. */
2018FNIEMOP_DEF(iemOp_prefetch_Grp16)
2019{
2020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2021 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2022 {
2023 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2024 {
2025 case 4: /* Aliased to /0 for the time being according to AMD. */
2026 case 5: /* Aliased to /0 for the time being according to AMD. */
2027 case 6: /* Aliased to /0 for the time being according to AMD. */
2028 case 7: /* Aliased to /0 for the time being according to AMD. */
2029 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2030 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2031 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2032 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2034 }
2035
2036 IEM_MC_BEGIN(0, 1);
2037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2040 /* Currently a NOP. */
2041 NOREF(GCPtrEffSrc);
2042 IEM_MC_ADVANCE_RIP();
2043 IEM_MC_END();
2044 return VINF_SUCCESS;
2045 }
2046
2047 return IEMOP_RAISE_INVALID_OPCODE();
2048}
2049
2050
2051/** Opcode 0x0f 0x19..0x1f. */
2052FNIEMOP_DEF(iemOp_nop_Ev)
2053{
2054 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2059 IEM_MC_BEGIN(0, 0);
2060 IEM_MC_ADVANCE_RIP();
2061 IEM_MC_END();
2062 }
2063 else
2064 {
2065 IEM_MC_BEGIN(0, 1);
2066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2069 /* Currently a NOP. */
2070 NOREF(GCPtrEffSrc);
2071 IEM_MC_ADVANCE_RIP();
2072 IEM_MC_END();
2073 }
2074 return VINF_SUCCESS;
2075}
2076
2077
2078/** Opcode 0x0f 0x20. */
2079FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2080{
2081 /* mod is ignored, as are operand-size overrides. */
2082 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2083 IEMOP_HLP_MIN_386();
2084 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2085 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2086 else
2087 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2088
2089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2090 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2092 {
2093 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2094 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2095 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2096 iCrReg |= 8;
2097 }
2098 switch (iCrReg)
2099 {
2100 case 0: case 2: case 3: case 4: case 8:
2101 break;
2102 default:
2103 return IEMOP_RAISE_INVALID_OPCODE();
2104 }
2105 IEMOP_HLP_DONE_DECODING();
2106
2107 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2108}
2109
2110
2111/** Opcode 0x0f 0x21. */
2112FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2113{
2114 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2115 IEMOP_HLP_MIN_386();
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2118 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2119 return IEMOP_RAISE_INVALID_OPCODE();
2120 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2121 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2122 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2123}
2124
2125
2126/** Opcode 0x0f 0x22. */
2127FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2128{
2129 /* mod is ignored, as are operand-size overrides. */
2130 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2131 IEMOP_HLP_MIN_386();
2132 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2133 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2134 else
2135 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2136
2137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2138 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2139 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2140 {
2141 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2142 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2143 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2144 iCrReg |= 8;
2145 }
2146 switch (iCrReg)
2147 {
2148 case 0: case 2: case 3: case 4: case 8:
2149 break;
2150 default:
2151 return IEMOP_RAISE_INVALID_OPCODE();
2152 }
2153 IEMOP_HLP_DONE_DECODING();
2154
2155 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2156}
2157
2158
2159/** Opcode 0x0f 0x23. */
2160FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2161{
2162 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2163 IEMOP_HLP_MIN_386();
2164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2166 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2167 return IEMOP_RAISE_INVALID_OPCODE();
2168 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2169 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2170 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2171}
2172
2173
2174/** Opcode 0x0f 0x24. */
2175FNIEMOP_DEF(iemOp_mov_Rd_Td)
2176{
2177 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2178 /** @todo works on 386 and 486. */
2179 /* The RM byte is not considered, see testcase. */
2180 return IEMOP_RAISE_INVALID_OPCODE();
2181}
2182
2183
2184/** Opcode 0x0f 0x26. */
2185FNIEMOP_DEF(iemOp_mov_Td_Rd)
2186{
2187 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2188 /** @todo works on 386 and 486. */
2189 /* The RM byte is not considered, see testcase. */
2190 return IEMOP_RAISE_INVALID_OPCODE();
2191}
2192
2193
2194/**
2195 * @opcode 0x28
2196 * @oppfx none
2197 * @opcpuid sse
2198 * @opgroup og_sse_simdfp_datamove
2199 * @opxcpttype 1
2200 * @optest op1=1 op2=2 -> op1=2
2201 * @optest op1=0 op2=-42 -> op1=-42
2202 */
2203FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2204{
2205 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2207 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2208 {
2209 /*
2210 * Register, register.
2211 */
2212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2213 IEM_MC_BEGIN(0, 0);
2214 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2215 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2216 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2217 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2218 IEM_MC_ADVANCE_RIP();
2219 IEM_MC_END();
2220 }
2221 else
2222 {
2223 /*
2224 * Register, memory.
2225 */
2226 IEM_MC_BEGIN(0, 2);
2227 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2229
2230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2232 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2233 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2234
2235 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2236 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2237
2238 IEM_MC_ADVANCE_RIP();
2239 IEM_MC_END();
2240 }
2241 return VINF_SUCCESS;
2242}
2243
2244/**
2245 * @opcode 0x28
2246 * @oppfx 66
2247 * @opcpuid sse2
2248 * @opgroup og_sse2_pcksclr_datamove
2249 * @opxcpttype 1
2250 * @optest op1=1 op2=2 -> op1=2
2251 * @optest op1=0 op2=-42 -> op1=-42
2252 */
2253FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2254{
2255 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2258 {
2259 /*
2260 * Register, register.
2261 */
2262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2263 IEM_MC_BEGIN(0, 0);
2264 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2265 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2266 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2267 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2268 IEM_MC_ADVANCE_RIP();
2269 IEM_MC_END();
2270 }
2271 else
2272 {
2273 /*
2274 * Register, memory.
2275 */
2276 IEM_MC_BEGIN(0, 2);
2277 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2279
2280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2282 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2283 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2284
2285 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2286 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2287
2288 IEM_MC_ADVANCE_RIP();
2289 IEM_MC_END();
2290 }
2291 return VINF_SUCCESS;
2292}
2293
2294/* Opcode 0xf3 0x0f 0x28 - invalid */
2295/* Opcode 0xf2 0x0f 0x28 - invalid */
2296
2297/**
2298 * @opcode 0x29
2299 * @oppfx none
2300 * @opcpuid sse
2301 * @opgroup og_sse_simdfp_datamove
2302 * @opxcpttype 1
2303 * @optest op1=1 op2=2 -> op1=2
2304 * @optest op1=0 op2=-42 -> op1=-42
2305 */
2306FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2307{
2308 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2311 {
2312 /*
2313 * Register, register.
2314 */
2315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2316 IEM_MC_BEGIN(0, 0);
2317 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2318 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2319 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2320 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2321 IEM_MC_ADVANCE_RIP();
2322 IEM_MC_END();
2323 }
2324 else
2325 {
2326 /*
2327 * Memory, register.
2328 */
2329 IEM_MC_BEGIN(0, 2);
2330 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2332
2333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2335 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2336 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2337
2338 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2339 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2340
2341 IEM_MC_ADVANCE_RIP();
2342 IEM_MC_END();
2343 }
2344 return VINF_SUCCESS;
2345}
2346
2347/**
2348 * @opcode 0x29
2349 * @oppfx 66
2350 * @opcpuid sse2
2351 * @opgroup og_sse2_pcksclr_datamove
2352 * @opxcpttype 1
2353 * @optest op1=1 op2=2 -> op1=2
2354 * @optest op1=0 op2=-42 -> op1=-42
2355 */
2356FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2357{
2358 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2360 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2361 {
2362 /*
2363 * Register, register.
2364 */
2365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2366 IEM_MC_BEGIN(0, 0);
2367 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2368 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2369 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2370 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2371 IEM_MC_ADVANCE_RIP();
2372 IEM_MC_END();
2373 }
2374 else
2375 {
2376 /*
2377 * Memory, register.
2378 */
2379 IEM_MC_BEGIN(0, 2);
2380 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2385 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2386 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2387
2388 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2389 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2390
2391 IEM_MC_ADVANCE_RIP();
2392 IEM_MC_END();
2393 }
2394 return VINF_SUCCESS;
2395}
2396
2397/* Opcode 0xf3 0x0f 0x29 - invalid */
2398/* Opcode 0xf2 0x0f 0x29 - invalid */
2399
2400
2401/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2402FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2403/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2404FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2405/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2406FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2407/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2408FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2409
2410
2411/**
2412 * @opcode 0x2b
2413 * @opcodesub !11 mr/reg
2414 * @oppfx none
2415 * @opcpuid sse
2416 * @opgroup og_sse1_cachect
2417 * @opxcpttype 1
2418 * @optest op1=1 op2=2 -> op1=2
2419 * @optest op1=0 op2=-42 -> op1=-42
2420 */
2421FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2422{
2423 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2425 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2426 {
2427 /*
2428 * memory, register.
2429 */
2430 IEM_MC_BEGIN(0, 2);
2431 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2433
2434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2436 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2437 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2438
2439 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2440 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2441
2442 IEM_MC_ADVANCE_RIP();
2443 IEM_MC_END();
2444 }
2445 /* The register, register encoding is invalid. */
2446 else
2447 return IEMOP_RAISE_INVALID_OPCODE();
2448 return VINF_SUCCESS;
2449}
2450
2451/**
2452 * @opcode 0x2b
2453 * @opcodesub !11 mr/reg
2454 * @oppfx 0x66
2455 * @opcpuid sse2
2456 * @opgroup og_sse2_cachect
2457 * @opxcpttype 1
2458 * @optest op1=1 op2=2 -> op1=2
2459 * @optest op1=0 op2=-42 -> op1=-42
2460 */
2461FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2462{
2463 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2465 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2466 {
2467 /*
2468 * memory, register.
2469 */
2470 IEM_MC_BEGIN(0, 2);
2471 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2473
2474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2476 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2478
2479 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2480 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2481
2482 IEM_MC_ADVANCE_RIP();
2483 IEM_MC_END();
2484 }
2485 /* The register, register encoding is invalid. */
2486 else
2487 return IEMOP_RAISE_INVALID_OPCODE();
2488 return VINF_SUCCESS;
2489}
2490/* Opcode 0xf3 0x0f 0x2b - invalid */
2491/* Opcode 0xf2 0x0f 0x2b - invalid */
2492
2493
2494/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2495FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2496/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2497FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2498/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2499FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2500/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2501FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2502
2503/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2504FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2505/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2506FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2507/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2508FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2509/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2510FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2511
2512/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2513FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2514/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2515FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2516/* Opcode 0xf3 0x0f 0x2e - invalid */
2517/* Opcode 0xf2 0x0f 0x2e - invalid */
2518
2519/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2520FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2521/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2522FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2523/* Opcode 0xf3 0x0f 0x2f - invalid */
2524/* Opcode 0xf2 0x0f 0x2f - invalid */
2525
2526/** Opcode 0x0f 0x30. */
2527FNIEMOP_DEF(iemOp_wrmsr)
2528{
2529 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2531 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2532}
2533
2534
2535/** Opcode 0x0f 0x31. */
2536FNIEMOP_DEF(iemOp_rdtsc)
2537{
2538 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2540 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2541}
2542
2543
2544/** Opcode 0x0f 0x32. */
2545FNIEMOP_DEF(iemOp_rdmsr)
2546{
2547 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2549 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2550}
2551
2552
2553/** Opcode 0x0f 0x33. */
2554FNIEMOP_DEF(iemOp_rdpmc)
2555{
2556 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2558 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2559}
2560
2561
2562/** Opcode 0x0f 0x34. */
2563FNIEMOP_STUB(iemOp_sysenter);
2564/** Opcode 0x0f 0x35. */
2565FNIEMOP_STUB(iemOp_sysexit);
2566/** Opcode 0x0f 0x37. */
2567FNIEMOP_STUB(iemOp_getsec);
2568
2569
2570/** Opcode 0x0f 0x38. */
2571FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2572{
2573#ifdef IEM_WITH_THREE_0F_38
2574 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2575 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2576#else
2577 IEMOP_BITCH_ABOUT_STUB();
2578 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2579#endif
2580}
2581
2582
2583/** Opcode 0x0f 0x3a. */
2584FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2585{
2586#ifdef IEM_WITH_THREE_0F_3A
2587 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2588 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2589#else
2590 IEMOP_BITCH_ABOUT_STUB();
2591 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2592#endif
2593}
2594
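/*
 * Note on the table layout assumed by the two escape bytes above: each
 * three-byte table is expected to hold four entries per opcode byte, one
 * per mandatory prefix (none, 0x66, 0xf3, 0xf2), selected via idxPrefix.
 * A hypothetical lookup for 66 0f 38 17 (ptest) would then look like this
 * (sketch only; the prefix-to-index mapping is an assumption):
 */
#if 0
PFNIEMOP const pfnPtest = g_apfnThreeByte0f38[(uintptr_t)0x17 * 4 + 1 /*0x66*/];
#endif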
2595
2596/**
2597 * Implements a conditional move.
2598 *
2599 * Wish there were an obvious way to do this where we could share and reduce
2600 * code bloat.
2601 *
2602 * @param a_Cnd The conditional "microcode" operation.
2603 */
2604#define CMOV_X(a_Cnd) \
2605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2606 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2607 { \
2608 switch (pVCpu->iem.s.enmEffOpSize) \
2609 { \
2610 case IEMMODE_16BIT: \
2611 IEM_MC_BEGIN(0, 1); \
2612 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2613 a_Cnd { \
2614 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2615 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2616 } IEM_MC_ENDIF(); \
2617 IEM_MC_ADVANCE_RIP(); \
2618 IEM_MC_END(); \
2619 return VINF_SUCCESS; \
2620 \
2621 case IEMMODE_32BIT: \
2622 IEM_MC_BEGIN(0, 1); \
2623 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2624 a_Cnd { \
2625 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2626 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2627 } IEM_MC_ELSE() { \
2628 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2629 } IEM_MC_ENDIF(); \
2630 IEM_MC_ADVANCE_RIP(); \
2631 IEM_MC_END(); \
2632 return VINF_SUCCESS; \
2633 \
2634 case IEMMODE_64BIT: \
2635 IEM_MC_BEGIN(0, 1); \
2636 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2637 a_Cnd { \
2638 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2639 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2640 } IEM_MC_ENDIF(); \
2641 IEM_MC_ADVANCE_RIP(); \
2642 IEM_MC_END(); \
2643 return VINF_SUCCESS; \
2644 \
2645 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2646 } \
2647 } \
2648 else \
2649 { \
2650 switch (pVCpu->iem.s.enmEffOpSize) \
2651 { \
2652 case IEMMODE_16BIT: \
2653 IEM_MC_BEGIN(0, 2); \
2654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2655 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2657 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2658 a_Cnd { \
2659 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2660 } IEM_MC_ENDIF(); \
2661 IEM_MC_ADVANCE_RIP(); \
2662 IEM_MC_END(); \
2663 return VINF_SUCCESS; \
2664 \
2665 case IEMMODE_32BIT: \
2666 IEM_MC_BEGIN(0, 2); \
2667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2668 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2670 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2671 a_Cnd { \
2672 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2673 } IEM_MC_ELSE() { \
2674 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2675 } IEM_MC_ENDIF(); \
2676 IEM_MC_ADVANCE_RIP(); \
2677 IEM_MC_END(); \
2678 return VINF_SUCCESS; \
2679 \
2680 case IEMMODE_64BIT: \
2681 IEM_MC_BEGIN(0, 2); \
2682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2683 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2685 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2686 a_Cnd { \
2687 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2688 } IEM_MC_ENDIF(); \
2689 IEM_MC_ADVANCE_RIP(); \
2690 IEM_MC_END(); \
2691 return VINF_SUCCESS; \
2692 \
2693 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2694 } \
2695 } do {} while (0)
2696
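/*
 * A small reference model of the 32-bit quirk the macro encodes above;
 * iemRefCmov32 is a made-up name and this block is not used by the
 * emulator. The condition only gates the move itself, but in 64-bit mode
 * the upper half of the destination is cleared even when the condition is
 * false, which is why only the 32-bit cases carry an IEM_MC_ELSE() branch.
 */
#if 0 /* illustrative reference model; assumes <stdint.h> and <stdbool.h> when extracted */
static uint64_t iemRefCmov32(uint64_t uDst, uint32_t uSrc, bool fCond)
{
    /* The high 32 bits come out zero either way; only the low half is conditional. */
    return fCond ? (uint64_t)uSrc : (uint64_t)(uint32_t)uDst;
}
#endif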
2697
2698
2699/** Opcode 0x0f 0x40. */
2700FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2701{
2702 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2703 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2704}
2705
2706
2707/** Opcode 0x0f 0x41. */
2708FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2709{
2710 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2711 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2712}
2713
2714
2715/** Opcode 0x0f 0x42. */
2716FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2717{
2718 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2719 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2720}
2721
2722
2723/** Opcode 0x0f 0x43. */
2724FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2725{
2726 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2727 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2728}
2729
2730
2731/** Opcode 0x0f 0x44. */
2732FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2733{
2734 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2735 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2736}
2737
2738
2739/** Opcode 0x0f 0x45. */
2740FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2741{
2742 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2743 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2744}
2745
2746
2747/** Opcode 0x0f 0x46. */
2748FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2749{
2750 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2751 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2752}
2753
2754
2755/** Opcode 0x0f 0x47. */
2756FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2757{
2758 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2759 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2760}
2761
2762
2763/** Opcode 0x0f 0x48. */
2764FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2765{
2766 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2767 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2768}
2769
2770
2771/** Opcode 0x0f 0x49. */
2772FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2773{
2774 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2775 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2776}
2777
2778
2779/** Opcode 0x0f 0x4a. */
2780FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2781{
2782 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2783 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2784}
2785
2786
2787/** Opcode 0x0f 0x4b. */
2788FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2789{
2790 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2791 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2792}
2793
2794
2795/** Opcode 0x0f 0x4c. */
2796FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2797{
2798 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2799 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2800}
2801
2802
2803/** Opcode 0x0f 0x4d. */
2804FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2805{
2806 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2807 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2808}
2809
2810
2811/** Opcode 0x0f 0x4e. */
2812FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2813{
2814 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2815 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2816}
2817
2818
2819/** Opcode 0x0f 0x4f. */
2820FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2821{
2822 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2823 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2824}
2825
2826#undef CMOV_X
2827
2828/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2829FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2830/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2831FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2832/* Opcode 0xf3 0x0f 0x50 - invalid */
2833/* Opcode 0xf2 0x0f 0x50 - invalid */
2834
2835/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2836FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2837/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2838FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2839/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2840FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2841/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2842FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2843
2844/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2845FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2846/* Opcode 0x66 0x0f 0x52 - invalid */
2847/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2848FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2849/* Opcode 0xf2 0x0f 0x52 - invalid */
2850
2851/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2852FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2853/* Opcode 0x66 0x0f 0x53 - invalid */
2854/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2855FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2856/* Opcode 0xf2 0x0f 0x53 - invalid */
2857
2858/** Opcode 0x0f 0x54 - andps Vps, Wps */
2859FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2860/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2861FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2862/* Opcode 0xf3 0x0f 0x54 - invalid */
2863/* Opcode 0xf2 0x0f 0x54 - invalid */
2864
2865/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2866FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2867/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2868FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2869/* Opcode 0xf3 0x0f 0x55 - invalid */
2870/* Opcode 0xf2 0x0f 0x55 - invalid */
2871
2872/** Opcode 0x0f 0x56 - orps Vps, Wps */
2873FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2874/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2875FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2876/* Opcode 0xf3 0x0f 0x56 - invalid */
2877/* Opcode 0xf2 0x0f 0x56 - invalid */
2878
2879/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2880FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2881/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2882FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2883/* Opcode 0xf3 0x0f 0x57 - invalid */
2884/* Opcode 0xf2 0x0f 0x57 - invalid */
2885
2886/** Opcode 0x0f 0x58 - addps Vps, Wps */
2887FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2888/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2889FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2890/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2891FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2892/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2893FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2894
2895/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2896FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2897/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2898FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2899/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2900FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2901/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2902FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2903
2904/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2905FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2906/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2907FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2908/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2909FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2910/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2911FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2912
2913/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2914FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2915/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2916FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2917/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2918FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2919/* Opcode 0xf2 0x0f 0x5b - invalid */
2920
2921/** Opcode 0x0f 0x5c - subps Vps, Wps */
2922FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2923/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2924FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2925/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2926FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2927/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2928FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2929
2930/** Opcode 0x0f 0x5d - minps Vps, Wps */
2931FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2932/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2933FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2934/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2935FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2936/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2937FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2938
2939/** Opcode 0x0f 0x5e - divps Vps, Wps */
2940FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2941/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2942FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2943/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2944FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2945/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2946FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2947
2948/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2949FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2950/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2951FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2952/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2953FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2954/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2955FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2956
2957/**
2958 * Common worker for SSE2 instructions on the forms:
2959 * pxxxx xmm1, xmm2/mem128
2960 *
2961 * The 2nd operand is the first half of a register, which in the memory case
2962 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2963 * memory accessed for SSE.
2964 *
2965 * Exceptions type 4.
2966 */
2967FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2968{
2969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2970 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2971 {
2972 /*
2973 * Register, register.
2974 */
2975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2976 IEM_MC_BEGIN(2, 0);
2977 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2978 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2979 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2980 IEM_MC_PREPARE_SSE_USAGE();
2981 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2982 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2983 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2984 IEM_MC_ADVANCE_RIP();
2985 IEM_MC_END();
2986 }
2987 else
2988 {
2989 /*
2990 * Register, memory.
2991 */
2992 IEM_MC_BEGIN(2, 2);
2993 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2994 IEM_MC_LOCAL(uint64_t, uSrc);
2995 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2997
2998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3000 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3001 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3002
3003 IEM_MC_PREPARE_SSE_USAGE();
3004 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3005 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3006
3007 IEM_MC_ADVANCE_RIP();
3008 IEM_MC_END();
3009 }
3010 return VINF_SUCCESS;
3011}
3012
3013
3014/**
3015 * Common worker for MMX instructions on the forms:
3016 * pxxxx mm1, mm2/mem32
3017 *
3018 * The 2nd operand is the first half of a register, which in the memory case
3019 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3020 * memory accessed for SSE.
3021 *
3022 * Exceptions type 4.
3023 */
3024FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3025{
3026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3027 if (!pImpl->pfnU64)
3028 return IEMOP_RAISE_INVALID_OPCODE();
3029 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3030 {
3031 /*
3032 * Register, register.
3033 */
3034 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3035 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3037 IEM_MC_BEGIN(2, 0);
3038 IEM_MC_ARG(uint64_t *, pDst, 0);
3039 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3040 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3041 IEM_MC_PREPARE_FPU_USAGE();
3042 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3043 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3044 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3045 IEM_MC_ADVANCE_RIP();
3046 IEM_MC_END();
3047 }
3048 else
3049 {
3050 /*
3051 * Register, memory.
3052 */
3053 IEM_MC_BEGIN(2, 2);
3054 IEM_MC_ARG(uint64_t *, pDst, 0);
3055 IEM_MC_LOCAL(uint32_t, uSrc);
3056 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3058
3059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3061 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3062 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3063
3064 IEM_MC_PREPARE_FPU_USAGE();
3065 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3066 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3067
3068 IEM_MC_ADVANCE_RIP();
3069 IEM_MC_END();
3070 }
3071 return VINF_SUCCESS;
3072}
3073
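/*
 * A plain-C sketch of the "low halves to full result" operation the two
 * workers above dispatch to, using the 64-bit punpcklbw case as the
 * example; iemRefPunpcklbwU64 is a made-up name and this block is not used
 * by the emulator.
 */
#if 0 /* illustrative reference model; assumes <stdint.h> when extracted */
static uint64_t iemRefPunpcklbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);     /* even result bytes from dst */
        uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); /* odd result bytes from src */
    }
    return uResult;
}
#endif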
3074
3075/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3076FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3077{
3078 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3079 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3080}
3081
3082/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3083FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3084{
3085 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3086 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3087}
3088
3089/* Opcode 0xf3 0x0f 0x60 - invalid */
3090
3091
3092/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3093FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3094{
3095 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
3096 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3097}
3098
3099/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3100FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3101{
3102 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3103 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3104}
3105
3106/* Opcode 0xf3 0x0f 0x61 - invalid */
3107
3108
3109/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3110FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3111{
3112 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3113 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3114}
3115
3116/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3117FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3118{
3119 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3120 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3121}
3122
3123/* Opcode 0xf3 0x0f 0x62 - invalid */
3124
3125
3126
3127/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3128FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3129/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3130FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3131/* Opcode 0xf3 0x0f 0x63 - invalid */
3132
3133/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3134FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3135/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3136FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3137/* Opcode 0xf3 0x0f 0x64 - invalid */
3138
3139/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3140FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3141/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3142FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3143/* Opcode 0xf3 0x0f 0x65 - invalid */
3144
3145/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3146FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3147/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3148FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3149/* Opcode 0xf3 0x0f 0x66 - invalid */
3150
3151/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3152FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3153/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3154FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3155/* Opcode 0xf3 0x0f 0x67 - invalid */
3156
3157
3158/**
3159 * Common worker for MMX instructions on the form:
3160 * pxxxx mm1, mm2/mem64
3161 *
3162 * The 2nd operand is the second half of a register, which in the memory case
3163 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3164 * where it may read the full 128 bits or only the upper 64 bits.
3165 *
3166 * Exceptions type 4.
3167 */
3168FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3169{
3170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3171 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3172 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3173 {
3174 /*
3175 * Register, register.
3176 */
3177 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3178 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3180 IEM_MC_BEGIN(2, 0);
3181 IEM_MC_ARG(uint64_t *, pDst, 0);
3182 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3183 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3184 IEM_MC_PREPARE_FPU_USAGE();
3185 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3186 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3187 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3188 IEM_MC_ADVANCE_RIP();
3189 IEM_MC_END();
3190 }
3191 else
3192 {
3193 /*
3194 * Register, memory.
3195 */
3196 IEM_MC_BEGIN(2, 2);
3197 IEM_MC_ARG(uint64_t *, pDst, 0);
3198 IEM_MC_LOCAL(uint64_t, uSrc);
3199 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3201
3202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3204 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3205 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3206
3207 IEM_MC_PREPARE_FPU_USAGE();
3208 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3209 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3210
3211 IEM_MC_ADVANCE_RIP();
3212 IEM_MC_END();
3213 }
3214 return VINF_SUCCESS;
3215}
3216
3217
3218/**
3219 * Common worker for SSE2 instructions on the form:
3220 * pxxxx xmm1, xmm2/mem128
3221 *
3222 * The 2nd operand is the second half of a register, which in the memory case
3223 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3224 * where it may read the full 128 bits or only the upper 64 bits.
3225 *
3226 * Exceptions type 4.
3227 */
3228FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3229{
3230 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3231 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3232 {
3233 /*
3234 * Register, register.
3235 */
3236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3237 IEM_MC_BEGIN(2, 0);
3238 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3239 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3240 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3241 IEM_MC_PREPARE_SSE_USAGE();
3242 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3243 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3244 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3245 IEM_MC_ADVANCE_RIP();
3246 IEM_MC_END();
3247 }
3248 else
3249 {
3250 /*
3251 * Register, memory.
3252 */
3253 IEM_MC_BEGIN(2, 2);
3254 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3255 IEM_MC_LOCAL(RTUINT128U, uSrc);
3256 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3258
3259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3262 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3263
3264 IEM_MC_PREPARE_SSE_USAGE();
3265 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3266 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3267
3268 IEM_MC_ADVANCE_RIP();
3269 IEM_MC_END();
3270 }
3271 return VINF_SUCCESS;
3272}
3273
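/*
 * The matching sketch for the "high halves to full result" workers above,
 * again using the 64-bit punpckhbw case; iemRefPunpckhbwU64 is a made-up
 * name and this block is not used by the emulator.
 */
#if 0 /* illustrative reference model; assumes <stdint.h> when extracted */
static uint64_t iemRefPunpckhbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (32 + i * 8)) & 0xff) << (i * 16);     /* even result bytes from dst */
        uResult |= ((uSrc >> (32 + i * 8)) & 0xff) << (i * 16 + 8); /* odd result bytes from src */
    }
    return uResult;
}
#endif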
3274
3275/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3276FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3277{
3278 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3279 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3280}
3281
3282/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3283FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3284{
3285 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3286 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3287}
3288/* Opcode 0xf3 0x0f 0x68 - invalid */
3289
3290
3291/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3292FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3293{
3294 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3295 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3296}
3297
3298/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3299FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3300{
3301 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3302 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3303
3304}
3305/* Opcode 0xf3 0x0f 0x69 - invalid */
3306
3307
3308/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3309FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3310{
3311 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3312 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3313}
3314
3315/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3316FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3317{
3318 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
3319 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3320}
3321/* Opcode 0xf3 0x0f 0x6a - invalid */
3322
3323
3324/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3325FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3326/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3327FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3328/* Opcode 0xf3 0x0f 0x6b - invalid */
3329
3330
3331/* Opcode 0x0f 0x6c - invalid */
3332
3333/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3334FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3335{
3336 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3337 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3338}
3339
3340/* Opcode 0xf3 0x0f 0x6c - invalid */
3341/* Opcode 0xf2 0x0f 0x6c - invalid */
3342
3343
3344/* Opcode 0x0f 0x6d - invalid */
3345
3346/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3347FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3348{
3349 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, Wx");
3350 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3351}
3352
3353/* Opcode 0xf3 0x0f 0x6d - invalid */
3354
3355
3356FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3357{
3358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3359 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3360 {
3361 /**
3362 * @opcode 0x6e
3363 * @opcodesub rex.w=1
3364 * @oppfx none
3365 * @opcpuid mmx
3366 * @opgroup og_mmx_datamove
3367 * @opxcpttype 5
3368 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3369 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3370 */
3371 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3373 {
3374 /* MMX, greg64 */
3375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3376 IEM_MC_BEGIN(0, 1);
3377 IEM_MC_LOCAL(uint64_t, u64Tmp);
3378
3379 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3380 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3381
3382 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3383 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3384 IEM_MC_FPU_TO_MMX_MODE();
3385
3386 IEM_MC_ADVANCE_RIP();
3387 IEM_MC_END();
3388 }
3389 else
3390 {
3391 /* MMX, [mem64] */
3392 IEM_MC_BEGIN(0, 2);
3393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3394 IEM_MC_LOCAL(uint64_t, u64Tmp);
3395
3396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3398 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3399 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3400
3401 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3402 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3403 IEM_MC_FPU_TO_MMX_MODE();
3404
3405 IEM_MC_ADVANCE_RIP();
3406 IEM_MC_END();
3407 }
3408 }
3409 else
3410 {
3411 /**
3412 * @opdone
3413 * @opcode 0x6e
3414 * @opcodesub rex.w=0
3415 * @oppfx none
3416 * @opcpuid mmx
3417 * @opgroup og_mmx_datamove
3418 * @opxcpttype 5
3419 * @opfunction iemOp_movd_q_Pd_Ey
3420 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3421 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3422 */
3423 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3424 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3425 {
3426 /* MMX, greg */
3427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3428 IEM_MC_BEGIN(0, 1);
3429 IEM_MC_LOCAL(uint64_t, u64Tmp);
3430
3431 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3432 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3433
3434 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3435 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3436 IEM_MC_FPU_TO_MMX_MODE();
3437
3438 IEM_MC_ADVANCE_RIP();
3439 IEM_MC_END();
3440 }
3441 else
3442 {
3443 /* MMX, [mem] */
3444 IEM_MC_BEGIN(0, 2);
3445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3446 IEM_MC_LOCAL(uint32_t, u32Tmp);
3447
3448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3450 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3451 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3452
3453 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3454 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3455 IEM_MC_FPU_TO_MMX_MODE();
3456
3457 IEM_MC_ADVANCE_RIP();
3458 IEM_MC_END();
3459 }
3460 }
3461 return VINF_SUCCESS;
3462}
3463
3464FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3465{
3466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3467 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3468 {
3469 /**
3470 * @opcode 0x6e
3471 * @opcodesub rex.w=1
3472 * @oppfx 0x66
3473 * @opcpuid sse2
3474 * @opgroup og_sse2_simdint_datamove
3475 * @opxcpttype 5
3476 * @optest 64-bit / op1=1 op2=2 -> op1=2
3477 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3478 */
3479 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3480 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3481 {
3482 /* XMM, greg64 */
3483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3484 IEM_MC_BEGIN(0, 1);
3485 IEM_MC_LOCAL(uint64_t, u64Tmp);
3486
3487 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3489
3490 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3491 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3492
3493 IEM_MC_ADVANCE_RIP();
3494 IEM_MC_END();
3495 }
3496 else
3497 {
3498 /* XMM, [mem64] */
3499 IEM_MC_BEGIN(0, 2);
3500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3501 IEM_MC_LOCAL(uint64_t, u64Tmp);
3502
3503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3505 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3506 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3507
3508 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3509 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3510
3511 IEM_MC_ADVANCE_RIP();
3512 IEM_MC_END();
3513 }
3514 }
3515 else
3516 {
3517 /**
3518 * @opdone
3519 * @opcode 0x6e
3520 * @opcodesub rex.w=0
3521 * @oppfx 0x66
3522 * @opcpuid sse2
3523 * @opgroup og_sse2_simdint_datamove
3524 * @opxcpttype 5
3525 * @opfunction iemOp_movd_q_Vy_Ey
3526 * @optest op1=1 op2=2 -> op1=2
3527 * @optest op1=0 op2=-42 -> op1=-42
3528 */
3529 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3530 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3531 {
3532 /* XMM, greg32 */
3533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3534 IEM_MC_BEGIN(0, 1);
3535 IEM_MC_LOCAL(uint32_t, u32Tmp);
3536
3537 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3538 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3539
3540 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3541 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3542
3543 IEM_MC_ADVANCE_RIP();
3544 IEM_MC_END();
3545 }
3546 else
3547 {
3548 /* XMM, [mem32] */
3549 IEM_MC_BEGIN(0, 2);
3550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3551 IEM_MC_LOCAL(uint32_t, u32Tmp);
3552
3553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3556 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3557
3558 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3559 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3560
3561 IEM_MC_ADVANCE_RIP();
3562 IEM_MC_END();
3563 }
3564 }
3565 return VINF_SUCCESS;
3566}
3567
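/*
 * A short sketch of the zero extension both movd/movq paths above perform
 * on the XMM destination; iemRefMovdqZxU128 is a made-up name and this
 * block is not used by the emulator.
 */
#if 0 /* illustrative reference model; assumes <stdint.h> and <stdbool.h> when extracted */
static void iemRefMovdqZxU128(uint64_t au64Dst[2], uint64_t uSrc, bool fRexW)
{
    au64Dst[0] = fRexW ? uSrc : (uint32_t)uSrc; /* movq keeps 64 bits, movd zero-extends 32 */
    au64Dst[1] = 0;                             /* the high qword is always cleared */
}
#endif
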
3568/* Opcode 0xf3 0x0f 0x6e - invalid */
3569
3570
3571/**
3572 * @opcode 0x6f
3573 * @oppfx none
3574 * @opcpuid mmx
3575 * @opgroup og_mmx_datamove
3576 * @opxcpttype 5
3577 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3578 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3579 */
3580FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3581{
3582 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3584 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3585 {
3586 /*
3587 * Register, register.
3588 */
3589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3590 IEM_MC_BEGIN(0, 1);
3591 IEM_MC_LOCAL(uint64_t, u64Tmp);
3592
3593 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3594 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3595
3596 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3597 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3598 IEM_MC_FPU_TO_MMX_MODE();
3599
3600 IEM_MC_ADVANCE_RIP();
3601 IEM_MC_END();
3602 }
3603 else
3604 {
3605 /*
3606 * Register, memory.
3607 */
3608 IEM_MC_BEGIN(0, 2);
3609 IEM_MC_LOCAL(uint64_t, u64Tmp);
3610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3611
3612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3614 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3615 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3616
3617 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3618 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3619 IEM_MC_FPU_TO_MMX_MODE();
3620
3621 IEM_MC_ADVANCE_RIP();
3622 IEM_MC_END();
3623 }
3624 return VINF_SUCCESS;
3625}
3626
3627/**
3628 * @opcode 0x6f
3629 * @oppfx 0x66
3630 * @opcpuid sse2
3631 * @opgroup og_sse2_simdint_datamove
3632 * @opxcpttype 1
3633 * @optest op1=1 op2=2 -> op1=2
3634 * @optest op1=0 op2=-42 -> op1=-42
3635 */
3636FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3637{
3638 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3641 {
3642 /*
3643 * Register, register.
3644 */
3645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3646 IEM_MC_BEGIN(0, 0);
3647
3648 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3649 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3650
3651 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3652 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3653 IEM_MC_ADVANCE_RIP();
3654 IEM_MC_END();
3655 }
3656 else
3657 {
3658 /*
3659 * Register, memory.
3660 */
3661 IEM_MC_BEGIN(0, 2);
3662 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3664
3665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3667 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3669
3670 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3671 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3672
3673 IEM_MC_ADVANCE_RIP();
3674 IEM_MC_END();
3675 }
3676 return VINF_SUCCESS;
3677}
3678
3679/**
3680 * @opcode 0x6f
3681 * @oppfx 0xf3
3682 * @opcpuid sse2
3683 * @opgroup og_sse2_simdint_datamove
3684 * @opxcpttype 4UA
3685 * @optest op1=1 op2=2 -> op1=2
3686 * @optest op1=0 op2=-42 -> op1=-42
3687 */
3688FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3689{
3690 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3693 {
3694 /*
3695 * Register, register.
3696 */
3697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3698 IEM_MC_BEGIN(0, 0);
3699 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3700 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3701 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3702 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3703 IEM_MC_ADVANCE_RIP();
3704 IEM_MC_END();
3705 }
3706 else
3707 {
3708 /*
3709 * Register, memory.
3710 */
3711 IEM_MC_BEGIN(0, 2);
3712 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3714
3715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3719 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3720 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3721
3722 IEM_MC_ADVANCE_RIP();
3723 IEM_MC_END();
3724 }
3725 return VINF_SUCCESS;
3726}
3727
3728
3729/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3730FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3731{
3732 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3734 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3735 {
3736 /*
3737 * Register, register.
3738 */
3739 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3741
3742 IEM_MC_BEGIN(3, 0);
3743 IEM_MC_ARG(uint64_t *, pDst, 0);
3744 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3745 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3746 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3747 IEM_MC_PREPARE_FPU_USAGE();
3748 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3749 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3750 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3751 IEM_MC_ADVANCE_RIP();
3752 IEM_MC_END();
3753 }
3754 else
3755 {
3756 /*
3757 * Register, memory.
3758 */
3759 IEM_MC_BEGIN(3, 2);
3760 IEM_MC_ARG(uint64_t *, pDst, 0);
3761 IEM_MC_LOCAL(uint64_t, uSrc);
3762 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3764
3765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3766 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3767 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3769 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3770
3771 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3772 IEM_MC_PREPARE_FPU_USAGE();
3773 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3774 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3775
3776 IEM_MC_ADVANCE_RIP();
3777 IEM_MC_END();
3778 }
3779 return VINF_SUCCESS;
3780}
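
/*
 * Reference sketch (compiled out) of what the iemAImpl_pshufw assembly
 * helper is expected to compute per the PSHUFW definition: destination
 * word i is the source word selected by immediate bits 2i..2i+1. The C
 * function below is illustrative only.
 */
#if 0
static void iemSketch_pshufw(uint64_t *puDst, uint64_t const *puSrc, uint8_t bImm)
{
    uint64_t const uSrc = *puSrc; /* read first, pDst may alias pSrc */
    uint64_t       uDst = 0;
    for (unsigned i = 0; i < 4; i++)
        uDst |= ((uSrc >> (((bImm >> (i * 2)) & 3) * 16)) & UINT64_C(0xffff)) << (i * 16);
    *puDst = uDst;
}
#endif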
3781
3782/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3783FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3784{
3785 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3787 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3788 {
3789 /*
3790 * Register, register.
3791 */
3792 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3794
3795 IEM_MC_BEGIN(3, 0);
3796 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3797 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3799 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3800 IEM_MC_PREPARE_SSE_USAGE();
3801 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3802 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3803 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3804 IEM_MC_ADVANCE_RIP();
3805 IEM_MC_END();
3806 }
3807 else
3808 {
3809 /*
3810 * Register, memory.
3811 */
3812 IEM_MC_BEGIN(3, 2);
3813 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3814 IEM_MC_LOCAL(RTUINT128U, uSrc);
3815 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3817
3818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3819 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3820 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3822 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3823
3824 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3825 IEM_MC_PREPARE_SSE_USAGE();
3826 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3827 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3828
3829 IEM_MC_ADVANCE_RIP();
3830 IEM_MC_END();
3831 }
3832 return VINF_SUCCESS;
3833}
3834
3835/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3836FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3837{
3838 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3841 {
3842 /*
3843 * Register, register.
3844 */
3845 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847
3848 IEM_MC_BEGIN(3, 0);
3849 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3850 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3851 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3852 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3853 IEM_MC_PREPARE_SSE_USAGE();
3854 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3855 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3856 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3857 IEM_MC_ADVANCE_RIP();
3858 IEM_MC_END();
3859 }
3860 else
3861 {
3862 /*
3863 * Register, memory.
3864 */
3865 IEM_MC_BEGIN(3, 2);
3866 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3867 IEM_MC_LOCAL(RTUINT128U, uSrc);
3868 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3870
3871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3872 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3873 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3875 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3876
3877 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3878 IEM_MC_PREPARE_SSE_USAGE();
3879 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3880 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3881
3882 IEM_MC_ADVANCE_RIP();
3883 IEM_MC_END();
3884 }
3885 return VINF_SUCCESS;
3886}
3887
3888/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3889FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3890{
3891 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3893 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3894 {
3895 /*
3896 * Register, register.
3897 */
3898 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3900
3901 IEM_MC_BEGIN(3, 0);
3902 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3903 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3904 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3905 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3906 IEM_MC_PREPARE_SSE_USAGE();
3907 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3908 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3909 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3910 IEM_MC_ADVANCE_RIP();
3911 IEM_MC_END();
3912 }
3913 else
3914 {
3915 /*
3916 * Register, memory.
3917 */
3918 IEM_MC_BEGIN(3, 2);
3919 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3920 IEM_MC_LOCAL(RTUINT128U, uSrc);
3921 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3923
3924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3925 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3926 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3928 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3929
3930 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3931 IEM_MC_PREPARE_SSE_USAGE();
3932 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3933 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3934
3935 IEM_MC_ADVANCE_RIP();
3936 IEM_MC_END();
3937 }
3938 return VINF_SUCCESS;
3939}
3940
3941
3942/** Opcode 0x0f 0x71 11/2. */
3943FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3944
3945/** Opcode 0x66 0x0f 0x71 11/2. */
3946FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3947
3948/** Opcode 0x0f 0x71 11/4. */
3949FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3950
3951/** Opcode 0x66 0x0f 0x71 11/4. */
3952FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3953
3954/** Opcode 0x0f 0x71 11/6. */
3955FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3956
3957/** Opcode 0x66 0x0f 0x71 11/6. */
3958FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3959
3960
3961/**
3962 * Group 12 jump table for register variant.
3963 */
3964IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3965{
3966 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3967 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3968 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3969 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3970 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3971 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3972 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3973 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3974};
3975AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3976
3977
3978/** Opcode 0x0f 0x71. */
3979FNIEMOP_DEF(iemOp_Grp12)
3980{
3981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3982 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3983 /* register, register */
3984 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3985 + pVCpu->iem.s.idxPrefix], bRm);
3986 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3987}
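
/*
 * Sketch (compiled out) of the dispatch indexing used by the group 12/13/14
 * decoders: each /reg value owns four consecutive table slots, one per
 * mandatory prefix state (the usual IEM idxPrefix order none, 0x66, 0xf3,
 * 0xf2 is assumed here). E.g. 66 0F 71 /2 ib - psrlw Ux,Ib - has reg=2 and
 * idxPrefix=1, selecting slot 2*4 + 1 = 9. Function name is hypothetical.
 */
#if 0
static PFNIEMOPRM iemSketchGrp12Slot(uint8_t bRm, uint8_t idxPrefix)
{
    unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
    return g_apfnGroup12RegReg[iReg * 4 + idxPrefix];
}
#endif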
3988
3989
3990/** Opcode 0x0f 0x72 11/2. */
3991FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3992
3993/** Opcode 0x66 0x0f 0x72 11/2. */
3994FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3995
3996/** Opcode 0x0f 0x72 11/4. */
3997FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3998
3999/** Opcode 0x66 0x0f 0x72 11/4. */
4000FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4001
4002/** Opcode 0x0f 0x72 11/6. */
4003FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4004
4005/** Opcode 0x66 0x0f 0x72 11/6. */
4006FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4007
4008
4009/**
4010 * Group 13 jump table for register variant.
4011 */
4012IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4013{
4014 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4015 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4016 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4017 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4018 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4019 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4020 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4021 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4022};
4023AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4024
4025/** Opcode 0x0f 0x72. */
4026FNIEMOP_DEF(iemOp_Grp13)
4027{
4028 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4029 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4030 /* register, register */
4031 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4032 + pVCpu->iem.s.idxPrefix], bRm);
4033 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4034}
4035
4036
4037/** Opcode 0x0f 0x73 11/2. */
4038FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4039
4040/** Opcode 0x66 0x0f 0x73 11/2. */
4041FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4042
4043/** Opcode 0x66 0x0f 0x73 11/3. */
4044FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4045
4046/** Opcode 0x0f 0x73 11/6. */
4047FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4048
4049/** Opcode 0x66 0x0f 0x73 11/6. */
4050FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4051
4052/** Opcode 0x66 0x0f 0x73 11/7. */
4053FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4054
4055/**
4056 * Group 14 jump table for register variant.
4057 */
4058IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4059{
4060 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4061 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4062 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4063 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4064 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4065 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4066 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4067 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4068};
4069AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4070
4071
4072/** Opcode 0x0f 0x73. */
4073FNIEMOP_DEF(iemOp_Grp14)
4074{
4075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4077 /* register, register */
4078 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4079 + pVCpu->iem.s.idxPrefix], bRm);
4080 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4081}
4082
4083
4084/**
4085 * Common worker for MMX instructions on the form:
4086 * pxxx mm1, mm2/mem64
4087 */
4088FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4089{
4090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4092 {
4093 /*
4094 * Register, register.
4095 */
4096 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4097 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4099 IEM_MC_BEGIN(2, 0);
4100 IEM_MC_ARG(uint64_t *, pDst, 0);
4101 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4102 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4103 IEM_MC_PREPARE_FPU_USAGE();
4104 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4105 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4106 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4107 IEM_MC_ADVANCE_RIP();
4108 IEM_MC_END();
4109 }
4110 else
4111 {
4112 /*
4113 * Register, memory.
4114 */
4115 IEM_MC_BEGIN(2, 2);
4116 IEM_MC_ARG(uint64_t *, pDst, 0);
4117 IEM_MC_LOCAL(uint64_t, uSrc);
4118 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4120
4121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4123 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4124 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4125
4126 IEM_MC_PREPARE_FPU_USAGE();
4127 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4128 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4129
4130 IEM_MC_ADVANCE_RIP();
4131 IEM_MC_END();
4132 }
4133 return VINF_SUCCESS;
4134}
4135
4136
4137/**
4138 * Common worker for SSE2 instructions on the form:
4139 * pxxx xmm1, xmm2/mem128
4140 *
4141 * Proper alignment of the 128-bit operand is enforced.
4142 * Exceptions type 4. SSE2 cpuid checks.
4143 */
4144FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4145{
4146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4147 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4148 {
4149 /*
4150 * Register, register.
4151 */
4152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4153 IEM_MC_BEGIN(2, 0);
4154 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4155 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4156 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4157 IEM_MC_PREPARE_SSE_USAGE();
4158 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4159 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4160 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4161 IEM_MC_ADVANCE_RIP();
4162 IEM_MC_END();
4163 }
4164 else
4165 {
4166 /*
4167 * Register, memory.
4168 */
4169 IEM_MC_BEGIN(2, 2);
4170 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4171 IEM_MC_LOCAL(RTUINT128U, uSrc);
4172 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4174
4175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4177 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4178 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4179
4180 IEM_MC_PREPARE_SSE_USAGE();
4181 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4182 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4183
4184 IEM_MC_ADVANCE_RIP();
4185 IEM_MC_END();
4186 }
4187 return VINF_SUCCESS;
4188}
4189
4190
4191/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4192FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4193{
4194 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4195 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4196}
4197
4198/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4199FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4200{
4201 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4202 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4203}
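
/*
 * Reference sketch (compiled out) of the pcmpeqb semantics implemented by
 * the g_iemAImpl_pcmpeqb workers: each destination byte becomes 0xff when
 * it equals the corresponding source byte and 0x00 otherwise. The 64-bit
 * variant is shown; the 128-bit one is the same over 16 bytes. Function
 * name is illustrative.
 */
#if 0
static void iemSketch_pcmpeqb_u64(uint64_t *puDst, uint64_t const *puSrc)
{
    RTUINT64U uSrc; uSrc.u = *puSrc;
    RTUINT64U uDst; uDst.u = *puDst;
    for (unsigned i = 0; i < 8; i++)
        uDst.au8[i] = uDst.au8[i] == uSrc.au8[i] ? UINT8_C(0xff) : UINT8_C(0x00);
    *puDst = uDst.u;
}
#endif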
4204
4205/* Opcode 0xf3 0x0f 0x74 - invalid */
4206/* Opcode 0xf2 0x0f 0x74 - invalid */
4207
4208
4209/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4210FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4211{
4212 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4213 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4214}
4215
4216/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4217FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4218{
4219 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4220 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4221}
4222
4223/* Opcode 0xf3 0x0f 0x75 - invalid */
4224/* Opcode 0xf2 0x0f 0x75 - invalid */
4225
4226
4227/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4228FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4229{
4230 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4231 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4232}
4233
4234/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4235FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4236{
4237 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4238 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4239}
4240
4241/* Opcode 0xf3 0x0f 0x76 - invalid */
4242/* Opcode 0xf2 0x0f 0x76 - invalid */
4243
4244
4245/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4246FNIEMOP_DEF(iemOp_emms)
4247{
4248 IEMOP_MNEMONIC(emms, "emms");
4249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4250
4251 IEM_MC_BEGIN(0,0);
4252 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4253 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4254 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4255 IEM_MC_FPU_FROM_MMX_MODE();
4256 IEM_MC_ADVANCE_RIP();
4257 IEM_MC_END();
4258 return VINF_SUCCESS;
4259}
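
/*
 * Illustrative sketch (compiled out): the visible effect of EMMS on the
 * FXSAVE image is the abridged tag byte going to zero, i.e. all eight
 * x87/MMX registers marked empty. Function name is hypothetical; the real
 * work happens in IEM_MC_FPU_FROM_MMX_MODE.
 */
#if 0
static void iemSketchEmmsEffect(PX86FXSTATE pFpuCtx)
{
    pFpuCtx->FTW = 0; /* abridged tag word: bit clear = register empty */
}
#endif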
4260
4261/* Opcode 0x66 0x0f 0x77 - invalid */
4262/* Opcode 0xf3 0x0f 0x77 - invalid */
4263/* Opcode 0xf2 0x0f 0x77 - invalid */
4264
4265/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4266FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4267/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4268FNIEMOP_STUB(iemOp_AmdGrp17);
4269/* Opcode 0xf3 0x0f 0x78 - invalid */
4270/* Opcode 0xf2 0x0f 0x78 - invalid */
4271
4272/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4273FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4274/* Opcode 0x66 0x0f 0x79 - invalid */
4275/* Opcode 0xf3 0x0f 0x79 - invalid */
4276/* Opcode 0xf2 0x0f 0x79 - invalid */
4277
4278/* Opcode 0x0f 0x7a - invalid */
4279/* Opcode 0x66 0x0f 0x7a - invalid */
4280/* Opcode 0xf3 0x0f 0x7a - invalid */
4281/* Opcode 0xf2 0x0f 0x7a - invalid */
4282
4283/* Opcode 0x0f 0x7b - invalid */
4284/* Opcode 0x66 0x0f 0x7b - invalid */
4285/* Opcode 0xf3 0x0f 0x7b - invalid */
4286/* Opcode 0xf2 0x0f 0x7b - invalid */
4287
4288/* Opcode 0x0f 0x7c - invalid */
4289/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4290FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4291/* Opcode 0xf3 0x0f 0x7c - invalid */
4292/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4293FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4294
4295/* Opcode 0x0f 0x7d - invalid */
4296/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4297FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4298/* Opcode 0xf3 0x0f 0x7d - invalid */
4299/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4300FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4301
4302
4303/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4304FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4305{
4306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4307 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4308 {
4309 /**
4310 * @opcode 0x7e
4311 * @opcodesub rex.w=1
4312 * @oppfx none
4313 * @opcpuid mmx
4314 * @opgroup og_mmx_datamove
4315 * @opxcpttype 5
4316 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4317 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4318 */
4319 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4320 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4321 {
4322 /* greg64, MMX */
4323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4324 IEM_MC_BEGIN(0, 1);
4325 IEM_MC_LOCAL(uint64_t, u64Tmp);
4326
4327 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4328 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4329
4330 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4331 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4332 IEM_MC_FPU_TO_MMX_MODE();
4333
4334 IEM_MC_ADVANCE_RIP();
4335 IEM_MC_END();
4336 }
4337 else
4338 {
4339 /* [mem64], MMX */
4340 IEM_MC_BEGIN(0, 2);
4341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4342 IEM_MC_LOCAL(uint64_t, u64Tmp);
4343
4344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4346 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4347 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4348
4349 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4350 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4351 IEM_MC_FPU_TO_MMX_MODE();
4352
4353 IEM_MC_ADVANCE_RIP();
4354 IEM_MC_END();
4355 }
4356 }
4357 else
4358 {
4359 /**
4360 * @opdone
4361 * @opcode 0x7e
4362 * @opcodesub rex.w=0
4363 * @oppfx none
4364 * @opcpuid mmx
4365 * @opgroup og_mmx_datamove
4366 * @opxcpttype 5
4367 * @opfunction iemOp_movd_q_Ey_Pd
4368 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4369 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4370 */
4371 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4373 {
4374 /* greg32, MMX */
4375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4376 IEM_MC_BEGIN(0, 1);
4377 IEM_MC_LOCAL(uint32_t, u32Tmp);
4378
4379 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4380 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4381
4382 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4383 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4384 IEM_MC_FPU_TO_MMX_MODE();
4385
4386 IEM_MC_ADVANCE_RIP();
4387 IEM_MC_END();
4388 }
4389 else
4390 {
4391 /* [mem32], MMX */
4392 IEM_MC_BEGIN(0, 2);
4393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4394 IEM_MC_LOCAL(uint32_t, u32Tmp);
4395
4396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4399 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4400
4401 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4402 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4403 IEM_MC_FPU_TO_MMX_MODE();
4404
4405 IEM_MC_ADVANCE_RIP();
4406 IEM_MC_END();
4407 }
4408 }
4409 return VINF_SUCCESS;
4411}
4412
4413
4414FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4415{
4416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4417 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4418 {
4419 /**
4420 * @opcode 0x7e
4421 * @opcodesub rex.w=1
4422 * @oppfx 0x66
4423 * @opcpuid sse2
4424 * @opgroup og_sse2_simdint_datamove
4425 * @opxcpttype 5
4426 * @optest 64-bit / op1=1 op2=2 -> op1=2
4427 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4428 */
4429 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4430 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4431 {
4432 /* greg64, XMM */
4433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4434 IEM_MC_BEGIN(0, 1);
4435 IEM_MC_LOCAL(uint64_t, u64Tmp);
4436
4437 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4439
4440 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4441 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4442
4443 IEM_MC_ADVANCE_RIP();
4444 IEM_MC_END();
4445 }
4446 else
4447 {
4448 /* [mem64], XMM */
4449 IEM_MC_BEGIN(0, 2);
4450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4451 IEM_MC_LOCAL(uint64_t, u64Tmp);
4452
4453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4455 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4456 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4457
4458 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4459 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4460
4461 IEM_MC_ADVANCE_RIP();
4462 IEM_MC_END();
4463 }
4464 }
4465 else
4466 {
4467 /**
4468 * @opdone
4469 * @opcode 0x7e
4470 * @opcodesub rex.w=0
4471 * @oppfx 0x66
4472 * @opcpuid sse2
4473 * @opgroup og_sse2_simdint_datamove
4474 * @opxcpttype 5
4475 * @opfunction iemOp_movd_q_Ey_Vy
4476 * @optest op1=1 op2=2 -> op1=2
4477 * @optest op1=0 op2=-42 -> op1=-42
4478 */
4479 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4480 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4481 {
4482 /* greg32, XMM */
4483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4484 IEM_MC_BEGIN(0, 1);
4485 IEM_MC_LOCAL(uint32_t, u32Tmp);
4486
4487 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4489
4490 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4491 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4492
4493 IEM_MC_ADVANCE_RIP();
4494 IEM_MC_END();
4495 }
4496 else
4497 {
4498 /* [mem32], XMM */
4499 IEM_MC_BEGIN(0, 2);
4500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4501 IEM_MC_LOCAL(uint32_t, u32Tmp);
4502
4503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4505 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4506 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4507
4508 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4509 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4510
4511 IEM_MC_ADVANCE_RIP();
4512 IEM_MC_END();
4513 }
4514 }
4515 return VINF_SUCCESS;
4517}
4518
4519/**
4520 * @opcode 0x7e
4521 * @oppfx 0xf3
4522 * @opcpuid sse2
4523 * @opgroup og_sse2_pcksclr_datamove
4524 * @opxcpttype 5
4525 * @optest op1=1 op2=2 -> op1=2
4526 * @optest op1=0 op2=-42 -> op1=-42
4527 */
4528FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4529{
4530 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4532 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4533 {
4534 /*
4535 * Register, register.
4536 */
4537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4538 IEM_MC_BEGIN(0, 2);
4539 IEM_MC_LOCAL(uint64_t, uSrc);
4540
4541 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4542 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4543
4544 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4545 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4546
4547 IEM_MC_ADVANCE_RIP();
4548 IEM_MC_END();
4549 }
4550 else
4551 {
4552 /*
4553 * Register, memory.
4554 */
4555 IEM_MC_BEGIN(0, 2);
4556 IEM_MC_LOCAL(uint64_t, uSrc);
4557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4558
4559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4561 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4562 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4563
4564 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4565 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4566
4567 IEM_MC_ADVANCE_RIP();
4568 IEM_MC_END();
4569 }
4570 return VINF_SUCCESS;
4571}
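
/*
 * Sketch (compiled out) of the zero-extending store used above: movq to an
 * XMM register writes the low quadword and clears the high quadword, which
 * is what IEM_MC_STORE_XREG_U64_ZX_U128 expresses. Helper name is
 * illustrative.
 */
#if 0
static void iemSketchStoreXregU64ZxU128(PRTUINT128U puDst, uint64_t uSrc)
{
    puDst->s.Lo = uSrc;
    puDst->s.Hi = 0;
}
#endif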
4572
4573/* Opcode 0xf2 0x0f 0x7e - invalid */
4574
4575
4576/** Opcode 0x0f 0x7f - movq Qq, Pq */
4577FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4578{
4579 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4581 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4582 {
4583 /*
4584 * Register, register.
4585 */
4586 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4587 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4589 IEM_MC_BEGIN(0, 1);
4590 IEM_MC_LOCAL(uint64_t, u64Tmp);
4591 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4592 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4593 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4594 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4595 IEM_MC_ADVANCE_RIP();
4596 IEM_MC_END();
4597 }
4598 else
4599 {
4600 /*
4601 * Memory, register.
4602 */
4603 IEM_MC_BEGIN(0, 2);
4604 IEM_MC_LOCAL(uint64_t, u64Tmp);
4605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4606
4607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4609 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4610 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4611
4612 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4613 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4614
4615 IEM_MC_ADVANCE_RIP();
4616 IEM_MC_END();
4617 }
4618 return VINF_SUCCESS;
4619}
4620
4621/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4622FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4623{
4624 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4626 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4627 {
4628 /*
4629 * Register, register.
4630 */
4631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4632 IEM_MC_BEGIN(0, 0);
4633 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4634 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4635 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4636 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4637 IEM_MC_ADVANCE_RIP();
4638 IEM_MC_END();
4639 }
4640 else
4641 {
4642 /*
4643 * Memory, register.
4644 */
4645 IEM_MC_BEGIN(0, 2);
4646 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4648
4649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4651 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4652 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4653
4654 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4655 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4656
4657 IEM_MC_ADVANCE_RIP();
4658 IEM_MC_END();
4659 }
4660 return VINF_SUCCESS;
4661}
4662
4663/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4664FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4665{
4666 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4668 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4669 {
4670 /*
4671 * Register, register.
4672 */
4673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4674 IEM_MC_BEGIN(0, 0);
4675 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4676 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4677 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4678 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4679 IEM_MC_ADVANCE_RIP();
4680 IEM_MC_END();
4681 }
4682 else
4683 {
4684 /*
4685 * Memory, register.
4686 */
4687 IEM_MC_BEGIN(0, 2);
4688 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4690
4691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4693 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4694 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4695
4696 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4697 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4698
4699 IEM_MC_ADVANCE_RIP();
4700 IEM_MC_END();
4701 }
4702 return VINF_SUCCESS;
4703}
4704
4705/* Opcode 0xf2 0x0f 0x7f - invalid */
4706
4707
4708
4709/** Opcode 0x0f 0x80. */
4710FNIEMOP_DEF(iemOp_jo_Jv)
4711{
4712 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4713 IEMOP_HLP_MIN_386();
4714 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4715 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4716 {
4717 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4719
4720 IEM_MC_BEGIN(0, 0);
4721 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4722 IEM_MC_REL_JMP_S16(i16Imm);
4723 } IEM_MC_ELSE() {
4724 IEM_MC_ADVANCE_RIP();
4725 } IEM_MC_ENDIF();
4726 IEM_MC_END();
4727 }
4728 else
4729 {
4730 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4732
4733 IEM_MC_BEGIN(0, 0);
4734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4735 IEM_MC_REL_JMP_S32(i32Imm);
4736 } IEM_MC_ELSE() {
4737 IEM_MC_ADVANCE_RIP();
4738 } IEM_MC_ENDIF();
4739 IEM_MC_END();
4740 }
4741 return VINF_SUCCESS;
4742}
4743
4744
4745/** Opcode 0x0f 0x81. */
4746FNIEMOP_DEF(iemOp_jno_Jv)
4747{
4748 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4749 IEMOP_HLP_MIN_386();
4750 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4751 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4752 {
4753 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4755
4756 IEM_MC_BEGIN(0, 0);
4757 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4758 IEM_MC_ADVANCE_RIP();
4759 } IEM_MC_ELSE() {
4760 IEM_MC_REL_JMP_S16(i16Imm);
4761 } IEM_MC_ENDIF();
4762 IEM_MC_END();
4763 }
4764 else
4765 {
4766 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768
4769 IEM_MC_BEGIN(0, 0);
4770 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4771 IEM_MC_ADVANCE_RIP();
4772 } IEM_MC_ELSE() {
4773 IEM_MC_REL_JMP_S32(i32Imm);
4774 } IEM_MC_ENDIF();
4775 IEM_MC_END();
4776 }
4777 return VINF_SUCCESS;
4778}
4779
4780
4781/** Opcode 0x0f 0x82. */
4782FNIEMOP_DEF(iemOp_jc_Jv)
4783{
4784 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4785 IEMOP_HLP_MIN_386();
4786 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4787 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4788 {
4789 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4791
4792 IEM_MC_BEGIN(0, 0);
4793 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4794 IEM_MC_REL_JMP_S16(i16Imm);
4795 } IEM_MC_ELSE() {
4796 IEM_MC_ADVANCE_RIP();
4797 } IEM_MC_ENDIF();
4798 IEM_MC_END();
4799 }
4800 else
4801 {
4802 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804
4805 IEM_MC_BEGIN(0, 0);
4806 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4807 IEM_MC_REL_JMP_S32(i32Imm);
4808 } IEM_MC_ELSE() {
4809 IEM_MC_ADVANCE_RIP();
4810 } IEM_MC_ENDIF();
4811 IEM_MC_END();
4812 }
4813 return VINF_SUCCESS;
4814}
4815
4816
4817/** Opcode 0x0f 0x83. */
4818FNIEMOP_DEF(iemOp_jnc_Jv)
4819{
4820 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4821 IEMOP_HLP_MIN_386();
4822 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4823 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4824 {
4825 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4827
4828 IEM_MC_BEGIN(0, 0);
4829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4830 IEM_MC_ADVANCE_RIP();
4831 } IEM_MC_ELSE() {
4832 IEM_MC_REL_JMP_S16(i16Imm);
4833 } IEM_MC_ENDIF();
4834 IEM_MC_END();
4835 }
4836 else
4837 {
4838 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4840
4841 IEM_MC_BEGIN(0, 0);
4842 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4843 IEM_MC_ADVANCE_RIP();
4844 } IEM_MC_ELSE() {
4845 IEM_MC_REL_JMP_S32(i32Imm);
4846 } IEM_MC_ENDIF();
4847 IEM_MC_END();
4848 }
4849 return VINF_SUCCESS;
4850}
4851
4852
4853/** Opcode 0x0f 0x84. */
4854FNIEMOP_DEF(iemOp_je_Jv)
4855{
4856 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4857 IEMOP_HLP_MIN_386();
4858 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4859 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4860 {
4861 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4863
4864 IEM_MC_BEGIN(0, 0);
4865 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4866 IEM_MC_REL_JMP_S16(i16Imm);
4867 } IEM_MC_ELSE() {
4868 IEM_MC_ADVANCE_RIP();
4869 } IEM_MC_ENDIF();
4870 IEM_MC_END();
4871 }
4872 else
4873 {
4874 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4876
4877 IEM_MC_BEGIN(0, 0);
4878 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4879 IEM_MC_REL_JMP_S32(i32Imm);
4880 } IEM_MC_ELSE() {
4881 IEM_MC_ADVANCE_RIP();
4882 } IEM_MC_ENDIF();
4883 IEM_MC_END();
4884 }
4885 return VINF_SUCCESS;
4886}
4887
4888
4889/** Opcode 0x0f 0x85. */
4890FNIEMOP_DEF(iemOp_jne_Jv)
4891{
4892 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4893 IEMOP_HLP_MIN_386();
4894 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4895 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4896 {
4897 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4899
4900 IEM_MC_BEGIN(0, 0);
4901 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4902 IEM_MC_ADVANCE_RIP();
4903 } IEM_MC_ELSE() {
4904 IEM_MC_REL_JMP_S16(i16Imm);
4905 } IEM_MC_ENDIF();
4906 IEM_MC_END();
4907 }
4908 else
4909 {
4910 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4912
4913 IEM_MC_BEGIN(0, 0);
4914 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4915 IEM_MC_ADVANCE_RIP();
4916 } IEM_MC_ELSE() {
4917 IEM_MC_REL_JMP_S32(i32Imm);
4918 } IEM_MC_ENDIF();
4919 IEM_MC_END();
4920 }
4921 return VINF_SUCCESS;
4922}
4923
4924
4925/** Opcode 0x0f 0x86. */
4926FNIEMOP_DEF(iemOp_jbe_Jv)
4927{
4928 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4929 IEMOP_HLP_MIN_386();
4930 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4931 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4932 {
4933 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4935
4936 IEM_MC_BEGIN(0, 0);
4937 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4938 IEM_MC_REL_JMP_S16(i16Imm);
4939 } IEM_MC_ELSE() {
4940 IEM_MC_ADVANCE_RIP();
4941 } IEM_MC_ENDIF();
4942 IEM_MC_END();
4943 }
4944 else
4945 {
4946 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4948
4949 IEM_MC_BEGIN(0, 0);
4950 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4951 IEM_MC_REL_JMP_S32(i32Imm);
4952 } IEM_MC_ELSE() {
4953 IEM_MC_ADVANCE_RIP();
4954 } IEM_MC_ENDIF();
4955 IEM_MC_END();
4956 }
4957 return VINF_SUCCESS;
4958}
4959
4960
4961/** Opcode 0x0f 0x87. */
4962FNIEMOP_DEF(iemOp_jnbe_Jv)
4963{
4964 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4965 IEMOP_HLP_MIN_386();
4966 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4967 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4968 {
4969 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4971
4972 IEM_MC_BEGIN(0, 0);
4973 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4974 IEM_MC_ADVANCE_RIP();
4975 } IEM_MC_ELSE() {
4976 IEM_MC_REL_JMP_S16(i16Imm);
4977 } IEM_MC_ENDIF();
4978 IEM_MC_END();
4979 }
4980 else
4981 {
4982 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4984
4985 IEM_MC_BEGIN(0, 0);
4986 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4987 IEM_MC_ADVANCE_RIP();
4988 } IEM_MC_ELSE() {
4989 IEM_MC_REL_JMP_S32(i32Imm);
4990 } IEM_MC_ENDIF();
4991 IEM_MC_END();
4992 }
4993 return VINF_SUCCESS;
4994}
4995
4996
4997/** Opcode 0x0f 0x88. */
4998FNIEMOP_DEF(iemOp_js_Jv)
4999{
5000 IEMOP_MNEMONIC(js_Jv, "js Jv");
5001 IEMOP_HLP_MIN_386();
5002 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5003 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5004 {
5005 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5007
5008 IEM_MC_BEGIN(0, 0);
5009 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5010 IEM_MC_REL_JMP_S16(i16Imm);
5011 } IEM_MC_ELSE() {
5012 IEM_MC_ADVANCE_RIP();
5013 } IEM_MC_ENDIF();
5014 IEM_MC_END();
5015 }
5016 else
5017 {
5018 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5020
5021 IEM_MC_BEGIN(0, 0);
5022 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5023 IEM_MC_REL_JMP_S32(i32Imm);
5024 } IEM_MC_ELSE() {
5025 IEM_MC_ADVANCE_RIP();
5026 } IEM_MC_ENDIF();
5027 IEM_MC_END();
5028 }
5029 return VINF_SUCCESS;
5030}
5031
5032
5033/** Opcode 0x0f 0x89. */
5034FNIEMOP_DEF(iemOp_jns_Jv)
5035{
5036 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5037 IEMOP_HLP_MIN_386();
5038 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5039 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5040 {
5041 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5043
5044 IEM_MC_BEGIN(0, 0);
5045 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5046 IEM_MC_ADVANCE_RIP();
5047 } IEM_MC_ELSE() {
5048 IEM_MC_REL_JMP_S16(i16Imm);
5049 } IEM_MC_ENDIF();
5050 IEM_MC_END();
5051 }
5052 else
5053 {
5054 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5056
5057 IEM_MC_BEGIN(0, 0);
5058 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5059 IEM_MC_ADVANCE_RIP();
5060 } IEM_MC_ELSE() {
5061 IEM_MC_REL_JMP_S32(i32Imm);
5062 } IEM_MC_ENDIF();
5063 IEM_MC_END();
5064 }
5065 return VINF_SUCCESS;
5066}
5067
5068
5069/** Opcode 0x0f 0x8a. */
5070FNIEMOP_DEF(iemOp_jp_Jv)
5071{
5072 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5073 IEMOP_HLP_MIN_386();
5074 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5075 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5076 {
5077 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5079
5080 IEM_MC_BEGIN(0, 0);
5081 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5082 IEM_MC_REL_JMP_S16(i16Imm);
5083 } IEM_MC_ELSE() {
5084 IEM_MC_ADVANCE_RIP();
5085 } IEM_MC_ENDIF();
5086 IEM_MC_END();
5087 }
5088 else
5089 {
5090 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5092
5093 IEM_MC_BEGIN(0, 0);
5094 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5095 IEM_MC_REL_JMP_S32(i32Imm);
5096 } IEM_MC_ELSE() {
5097 IEM_MC_ADVANCE_RIP();
5098 } IEM_MC_ENDIF();
5099 IEM_MC_END();
5100 }
5101 return VINF_SUCCESS;
5102}
5103
5104
5105/** Opcode 0x0f 0x8b. */
5106FNIEMOP_DEF(iemOp_jnp_Jv)
5107{
5108 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5109 IEMOP_HLP_MIN_386();
5110 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5111 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5112 {
5113 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5115
5116 IEM_MC_BEGIN(0, 0);
5117 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5118 IEM_MC_ADVANCE_RIP();
5119 } IEM_MC_ELSE() {
5120 IEM_MC_REL_JMP_S16(i16Imm);
5121 } IEM_MC_ENDIF();
5122 IEM_MC_END();
5123 }
5124 else
5125 {
5126 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5128
5129 IEM_MC_BEGIN(0, 0);
5130 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5131 IEM_MC_ADVANCE_RIP();
5132 } IEM_MC_ELSE() {
5133 IEM_MC_REL_JMP_S32(i32Imm);
5134 } IEM_MC_ENDIF();
5135 IEM_MC_END();
5136 }
5137 return VINF_SUCCESS;
5138}
5139
5140
5141/** Opcode 0x0f 0x8c. */
5142FNIEMOP_DEF(iemOp_jl_Jv)
5143{
5144 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5145 IEMOP_HLP_MIN_386();
5146 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5147 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5148 {
5149 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5151
5152 IEM_MC_BEGIN(0, 0);
5153 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5154 IEM_MC_REL_JMP_S16(i16Imm);
5155 } IEM_MC_ELSE() {
5156 IEM_MC_ADVANCE_RIP();
5157 } IEM_MC_ENDIF();
5158 IEM_MC_END();
5159 }
5160 else
5161 {
5162 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5164
5165 IEM_MC_BEGIN(0, 0);
5166 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5167 IEM_MC_REL_JMP_S32(i32Imm);
5168 } IEM_MC_ELSE() {
5169 IEM_MC_ADVANCE_RIP();
5170 } IEM_MC_ENDIF();
5171 IEM_MC_END();
5172 }
5173 return VINF_SUCCESS;
5174}
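
/*
 * Sketch (compiled out) of the signed-less-than predicate that the
 * IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) test above encodes: jl is
 * taken exactly when SF and OF disagree. Function name is illustrative.
 */
#if 0
static bool iemSketchIsLess(uint32_t fEfl)
{
    return RT_BOOL(fEfl & X86_EFL_SF) != RT_BOOL(fEfl & X86_EFL_OF);
}
#endif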
5175
5176
5177/** Opcode 0x0f 0x8d. */
5178FNIEMOP_DEF(iemOp_jnl_Jv)
5179{
5180 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5181 IEMOP_HLP_MIN_386();
5182 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5183 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5184 {
5185 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187
5188 IEM_MC_BEGIN(0, 0);
5189 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5190 IEM_MC_ADVANCE_RIP();
5191 } IEM_MC_ELSE() {
5192 IEM_MC_REL_JMP_S16(i16Imm);
5193 } IEM_MC_ENDIF();
5194 IEM_MC_END();
5195 }
5196 else
5197 {
5198 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5200
5201 IEM_MC_BEGIN(0, 0);
5202 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5203 IEM_MC_ADVANCE_RIP();
5204 } IEM_MC_ELSE() {
5205 IEM_MC_REL_JMP_S32(i32Imm);
5206 } IEM_MC_ENDIF();
5207 IEM_MC_END();
5208 }
5209 return VINF_SUCCESS;
5210}
5211
5212
5213/** Opcode 0x0f 0x8e. */
5214FNIEMOP_DEF(iemOp_jle_Jv)
5215{
5216 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5217 IEMOP_HLP_MIN_386();
5218 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5219 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5220 {
5221 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5223
5224 IEM_MC_BEGIN(0, 0);
5225 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5226 IEM_MC_REL_JMP_S16(i16Imm);
5227 } IEM_MC_ELSE() {
5228 IEM_MC_ADVANCE_RIP();
5229 } IEM_MC_ENDIF();
5230 IEM_MC_END();
5231 }
5232 else
5233 {
5234 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5236
5237 IEM_MC_BEGIN(0, 0);
5238 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5239 IEM_MC_REL_JMP_S32(i32Imm);
5240 } IEM_MC_ELSE() {
5241 IEM_MC_ADVANCE_RIP();
5242 } IEM_MC_ENDIF();
5243 IEM_MC_END();
5244 }
5245 return VINF_SUCCESS;
5246}
5247
5248
5249/** Opcode 0x0f 0x8f. */
5250FNIEMOP_DEF(iemOp_jnle_Jv)
5251{
5252 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5253 IEMOP_HLP_MIN_386();
5254 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5255 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5256 {
5257 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259
5260 IEM_MC_BEGIN(0, 0);
5261 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5262 IEM_MC_ADVANCE_RIP();
5263 } IEM_MC_ELSE() {
5264 IEM_MC_REL_JMP_S16(i16Imm);
5265 } IEM_MC_ENDIF();
5266 IEM_MC_END();
5267 }
5268 else
5269 {
5270 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5272
5273 IEM_MC_BEGIN(0, 0);
5274 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5275 IEM_MC_ADVANCE_RIP();
5276 } IEM_MC_ELSE() {
5277 IEM_MC_REL_JMP_S32(i32Imm);
5278 } IEM_MC_ENDIF();
5279 IEM_MC_END();
5280 }
5281 return VINF_SUCCESS;
5282}
5283
5284
5285/** Opcode 0x0f 0x90. */
5286FNIEMOP_DEF(iemOp_seto_Eb)
5287{
5288 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5289 IEMOP_HLP_MIN_386();
5290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5291
5292 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5293 * any way. AMD says it's "unused", whatever that means. We're
5294 * ignoring for now. */
5295 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5296 {
5297 /* register target */
5298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5299 IEM_MC_BEGIN(0, 0);
5300 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5301 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5302 } IEM_MC_ELSE() {
5303 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5304 } IEM_MC_ENDIF();
5305 IEM_MC_ADVANCE_RIP();
5306 IEM_MC_END();
5307 }
5308 else
5309 {
5310 /* memory target */
5311 IEM_MC_BEGIN(0, 1);
5312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5316 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5317 } IEM_MC_ELSE() {
5318 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5319 } IEM_MC_ENDIF();
5320 IEM_MC_ADVANCE_RIP();
5321 IEM_MC_END();
5322 }
5323 return VINF_SUCCESS;
5324}
5325
5326
5327/** Opcode 0x0f 0x91. */
5328FNIEMOP_DEF(iemOp_setno_Eb)
5329{
5330 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5331 IEMOP_HLP_MIN_386();
5332 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5333
5334 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5335 * any way. AMD says it's "unused", whatever that means. We're
5336 * ignoring for now. */
5337 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5338 {
5339 /* register target */
5340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5341 IEM_MC_BEGIN(0, 0);
5342 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5343 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5344 } IEM_MC_ELSE() {
5345 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5346 } IEM_MC_ENDIF();
5347 IEM_MC_ADVANCE_RIP();
5348 IEM_MC_END();
5349 }
5350 else
5351 {
5352 /* memory target */
5353 IEM_MC_BEGIN(0, 1);
5354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5357 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5358 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5359 } IEM_MC_ELSE() {
5360 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5361 } IEM_MC_ENDIF();
5362 IEM_MC_ADVANCE_RIP();
5363 IEM_MC_END();
5364 }
5365 return VINF_SUCCESS;
5366}
5367
5368
5369/** Opcode 0x0f 0x92. */
5370FNIEMOP_DEF(iemOp_setc_Eb)
5371{
5372 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5373 IEMOP_HLP_MIN_386();
5374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5375
5376 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5377 * any way. AMD says it's "unused", whatever that means. We're
5378 * ignoring for now. */
5379 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5380 {
5381 /* register target */
5382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5383 IEM_MC_BEGIN(0, 0);
5384 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5385 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5386 } IEM_MC_ELSE() {
5387 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5388 } IEM_MC_ENDIF();
5389 IEM_MC_ADVANCE_RIP();
5390 IEM_MC_END();
5391 }
5392 else
5393 {
5394 /* memory target */
5395 IEM_MC_BEGIN(0, 1);
5396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5399 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5400 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5401 } IEM_MC_ELSE() {
5402 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5403 } IEM_MC_ENDIF();
5404 IEM_MC_ADVANCE_RIP();
5405 IEM_MC_END();
5406 }
5407 return VINF_SUCCESS;
5408}
5409
5410
5411/** Opcode 0x0f 0x93. */
5412FNIEMOP_DEF(iemOp_setnc_Eb)
5413{
5414 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5415 IEMOP_HLP_MIN_386();
5416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5417
5418 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5419 * any way. AMD says it's "unused", whatever that means. We're
5420 * ignoring for now. */
5421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5422 {
5423 /* register target */
5424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5425 IEM_MC_BEGIN(0, 0);
5426 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5427 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5428 } IEM_MC_ELSE() {
5429 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5430 } IEM_MC_ENDIF();
5431 IEM_MC_ADVANCE_RIP();
5432 IEM_MC_END();
5433 }
5434 else
5435 {
5436 /* memory target */
5437 IEM_MC_BEGIN(0, 1);
5438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5441 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5442 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5443 } IEM_MC_ELSE() {
5444 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5445 } IEM_MC_ENDIF();
5446 IEM_MC_ADVANCE_RIP();
5447 IEM_MC_END();
5448 }
5449 return VINF_SUCCESS;
5450}
5451
5452
5453/** Opcode 0x0f 0x94. */
5454FNIEMOP_DEF(iemOp_sete_Eb)
5455{
5456 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5457 IEMOP_HLP_MIN_386();
5458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5459
5460 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5461 * any way. AMD says it's "unused", whatever that means. We're
5462 * ignoring for now. */
5463 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5464 {
5465 /* register target */
5466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5467 IEM_MC_BEGIN(0, 0);
5468 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5469 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5470 } IEM_MC_ELSE() {
5471 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5472 } IEM_MC_ENDIF();
5473 IEM_MC_ADVANCE_RIP();
5474 IEM_MC_END();
5475 }
5476 else
5477 {
5478 /* memory target */
5479 IEM_MC_BEGIN(0, 1);
5480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5484 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5485 } IEM_MC_ELSE() {
5486 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5487 } IEM_MC_ENDIF();
5488 IEM_MC_ADVANCE_RIP();
5489 IEM_MC_END();
5490 }
5491 return VINF_SUCCESS;
5492}
5493
5494
5495/** Opcode 0x0f 0x95. */
5496FNIEMOP_DEF(iemOp_setne_Eb)
5497{
5498 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5499 IEMOP_HLP_MIN_386();
5500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5501
5502 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5503 * any way. AMD says it's "unused", whatever that means. We're
5504 * ignoring for now. */
5505 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5506 {
5507 /* register target */
5508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5509 IEM_MC_BEGIN(0, 0);
5510 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5511 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5512 } IEM_MC_ELSE() {
5513 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5514 } IEM_MC_ENDIF();
5515 IEM_MC_ADVANCE_RIP();
5516 IEM_MC_END();
5517 }
5518 else
5519 {
5520 /* memory target */
5521 IEM_MC_BEGIN(0, 1);
5522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5526 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5527 } IEM_MC_ELSE() {
5528 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5529 } IEM_MC_ENDIF();
5530 IEM_MC_ADVANCE_RIP();
5531 IEM_MC_END();
5532 }
5533 return VINF_SUCCESS;
5534}
5535
5536
5537/** Opcode 0x0f 0x96. */
5538FNIEMOP_DEF(iemOp_setbe_Eb)
5539{
5540 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5541 IEMOP_HLP_MIN_386();
5542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5543
5544 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5545 * any way. AMD says it's "unused", whatever that means. We're
5546 * ignoring it for now. */
5547 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5548 {
5549 /* register target */
5550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5551 IEM_MC_BEGIN(0, 0);
5552 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5553 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5554 } IEM_MC_ELSE() {
5555 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5556 } IEM_MC_ENDIF();
5557 IEM_MC_ADVANCE_RIP();
5558 IEM_MC_END();
5559 }
5560 else
5561 {
5562 /* memory target */
5563 IEM_MC_BEGIN(0, 1);
5564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5567 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5568 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5569 } IEM_MC_ELSE() {
5570 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5571 } IEM_MC_ENDIF();
5572 IEM_MC_ADVANCE_RIP();
5573 IEM_MC_END();
5574 }
5575 return VINF_SUCCESS;
5576}
5577
5578
5579/** Opcode 0x0f 0x97. */
5580FNIEMOP_DEF(iemOp_setnbe_Eb)
5581{
5582 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5583 IEMOP_HLP_MIN_386();
5584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5585
5586 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5587 * any way. AMD says it's "unused", whatever that means. We're
5588 * ignoring it for now. */
5589 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5590 {
5591 /* register target */
5592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5593 IEM_MC_BEGIN(0, 0);
5594 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5595 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5596 } IEM_MC_ELSE() {
5597 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5598 } IEM_MC_ENDIF();
5599 IEM_MC_ADVANCE_RIP();
5600 IEM_MC_END();
5601 }
5602 else
5603 {
5604 /* memory target */
5605 IEM_MC_BEGIN(0, 1);
5606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5609 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5610 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5611 } IEM_MC_ELSE() {
5612 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5613 } IEM_MC_ENDIF();
5614 IEM_MC_ADVANCE_RIP();
5615 IEM_MC_END();
5616 }
5617 return VINF_SUCCESS;
5618}
5619
5620
5621/** Opcode 0x0f 0x98. */
5622FNIEMOP_DEF(iemOp_sets_Eb)
5623{
5624 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5625 IEMOP_HLP_MIN_386();
5626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5627
5628 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5629 * any way. AMD says it's "unused", whatever that means. We're
5630 * ignoring it for now. */
5631 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5632 {
5633 /* register target */
5634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5635 IEM_MC_BEGIN(0, 0);
5636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5637 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5638 } IEM_MC_ELSE() {
5639 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5640 } IEM_MC_ENDIF();
5641 IEM_MC_ADVANCE_RIP();
5642 IEM_MC_END();
5643 }
5644 else
5645 {
5646 /* memory target */
5647 IEM_MC_BEGIN(0, 1);
5648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5651 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5652 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5653 } IEM_MC_ELSE() {
5654 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5655 } IEM_MC_ENDIF();
5656 IEM_MC_ADVANCE_RIP();
5657 IEM_MC_END();
5658 }
5659 return VINF_SUCCESS;
5660}
5661
5662
5663/** Opcode 0x0f 0x99. */
5664FNIEMOP_DEF(iemOp_setns_Eb)
5665{
5666 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5667 IEMOP_HLP_MIN_386();
5668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5669
5670 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5671 * any way. AMD says it's "unused", whatever that means. We're
5672 * ignoring it for now. */
5673 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5674 {
5675 /* register target */
5676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5677 IEM_MC_BEGIN(0, 0);
5678 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5679 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5680 } IEM_MC_ELSE() {
5681 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5682 } IEM_MC_ENDIF();
5683 IEM_MC_ADVANCE_RIP();
5684 IEM_MC_END();
5685 }
5686 else
5687 {
5688 /* memory target */
5689 IEM_MC_BEGIN(0, 1);
5690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5693 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5694 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5695 } IEM_MC_ELSE() {
5696 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5697 } IEM_MC_ENDIF();
5698 IEM_MC_ADVANCE_RIP();
5699 IEM_MC_END();
5700 }
5701 return VINF_SUCCESS;
5702}
5703
5704
5705/** Opcode 0x0f 0x9a. */
5706FNIEMOP_DEF(iemOp_setp_Eb)
5707{
5708 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5709 IEMOP_HLP_MIN_386();
5710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5711
5712 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5713 * any way. AMD says it's "unused", whatever that means. We're
5714 * ignoring it for now. */
5715 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5716 {
5717 /* register target */
5718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5719 IEM_MC_BEGIN(0, 0);
5720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5721 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5722 } IEM_MC_ELSE() {
5723 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5724 } IEM_MC_ENDIF();
5725 IEM_MC_ADVANCE_RIP();
5726 IEM_MC_END();
5727 }
5728 else
5729 {
5730 /* memory target */
5731 IEM_MC_BEGIN(0, 1);
5732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5736 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5737 } IEM_MC_ELSE() {
5738 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5739 } IEM_MC_ENDIF();
5740 IEM_MC_ADVANCE_RIP();
5741 IEM_MC_END();
5742 }
5743 return VINF_SUCCESS;
5744}
5745
5746
5747/** Opcode 0x0f 0x9b. */
5748FNIEMOP_DEF(iemOp_setnp_Eb)
5749{
5750 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5751 IEMOP_HLP_MIN_386();
5752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5753
5754 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5755 * any way. AMD says it's "unused", whatever that means. We're
5756 * ignoring it for now. */
5757 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5758 {
5759 /* register target */
5760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5761 IEM_MC_BEGIN(0, 0);
5762 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5763 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5764 } IEM_MC_ELSE() {
5765 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5766 } IEM_MC_ENDIF();
5767 IEM_MC_ADVANCE_RIP();
5768 IEM_MC_END();
5769 }
5770 else
5771 {
5772 /* memory target */
5773 IEM_MC_BEGIN(0, 1);
5774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5777 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5778 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5779 } IEM_MC_ELSE() {
5780 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5781 } IEM_MC_ENDIF();
5782 IEM_MC_ADVANCE_RIP();
5783 IEM_MC_END();
5784 }
5785 return VINF_SUCCESS;
5786}
5787
5788
5789/** Opcode 0x0f 0x9c. */
5790FNIEMOP_DEF(iemOp_setl_Eb)
5791{
5792 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5793 IEMOP_HLP_MIN_386();
5794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5795
5796 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5797 * any way. AMD says it's "unused", whatever that means. We're
5798 * ignoring it for now. */
5799 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5800 {
5801 /* register target */
5802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5803 IEM_MC_BEGIN(0, 0);
5804 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5805 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5806 } IEM_MC_ELSE() {
5807 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5808 } IEM_MC_ENDIF();
5809 IEM_MC_ADVANCE_RIP();
5810 IEM_MC_END();
5811 }
5812 else
5813 {
5814 /* memory target */
5815 IEM_MC_BEGIN(0, 1);
5816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5819 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5820 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5821 } IEM_MC_ELSE() {
5822 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5823 } IEM_MC_ENDIF();
5824 IEM_MC_ADVANCE_RIP();
5825 IEM_MC_END();
5826 }
5827 return VINF_SUCCESS;
5828}
5829
5830
5831/** Opcode 0x0f 0x9d. */
5832FNIEMOP_DEF(iemOp_setnl_Eb)
5833{
5834 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5835 IEMOP_HLP_MIN_386();
5836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5837
5838 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5839 * any way. AMD says it's "unused", whatever that means. We're
5840 * ignoring it for now. */
5841 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5842 {
5843 /* register target */
5844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5845 IEM_MC_BEGIN(0, 0);
5846 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5847 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5848 } IEM_MC_ELSE() {
5849 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5850 } IEM_MC_ENDIF();
5851 IEM_MC_ADVANCE_RIP();
5852 IEM_MC_END();
5853 }
5854 else
5855 {
5856 /* memory target */
5857 IEM_MC_BEGIN(0, 1);
5858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5861 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5862 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5863 } IEM_MC_ELSE() {
5864 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5865 } IEM_MC_ENDIF();
5866 IEM_MC_ADVANCE_RIP();
5867 IEM_MC_END();
5868 }
5869 return VINF_SUCCESS;
5870}
5871
5872
5873/** Opcode 0x0f 0x9e. */
5874FNIEMOP_DEF(iemOp_setle_Eb)
5875{
5876 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5877 IEMOP_HLP_MIN_386();
5878 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5879
5880 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5881 * any way. AMD says it's "unused", whatever that means. We're
5882 * ignoring it for now. */
5883 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5884 {
5885 /* register target */
5886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5887 IEM_MC_BEGIN(0, 0);
5888 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5889 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5890 } IEM_MC_ELSE() {
5891 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5892 } IEM_MC_ENDIF();
5893 IEM_MC_ADVANCE_RIP();
5894 IEM_MC_END();
5895 }
5896 else
5897 {
5898 /* memory target */
5899 IEM_MC_BEGIN(0, 1);
5900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5903 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5904 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5905 } IEM_MC_ELSE() {
5906 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5907 } IEM_MC_ENDIF();
5908 IEM_MC_ADVANCE_RIP();
5909 IEM_MC_END();
5910 }
5911 return VINF_SUCCESS;
5912}
5913
5914
5915/** Opcode 0x0f 0x9f. */
5916FNIEMOP_DEF(iemOp_setnle_Eb)
5917{
5918 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5919 IEMOP_HLP_MIN_386();
5920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5921
5922 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5923 * any way. AMD says it's "unused", whatever that means. We're
5924 * ignoring it for now. */
5925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5926 {
5927 /* register target */
5928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5929 IEM_MC_BEGIN(0, 0);
5930 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5931 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5932 } IEM_MC_ELSE() {
5933 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5934 } IEM_MC_ENDIF();
5935 IEM_MC_ADVANCE_RIP();
5936 IEM_MC_END();
5937 }
5938 else
5939 {
5940 /* memory target */
5941 IEM_MC_BEGIN(0, 1);
5942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5945 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5946 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5947 } IEM_MC_ELSE() {
5948 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5949 } IEM_MC_ENDIF();
5950 IEM_MC_ADVANCE_RIP();
5951 IEM_MC_END();
5952 }
5953 return VINF_SUCCESS;
5954}
5955
5956
5957/**
5958 * Common 'push segment-register' helper.
5959 */
5960FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5961{
5962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5963 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS/GS pushes reach this helper in 64-bit mode. */
5964 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5965
5966 switch (pVCpu->iem.s.enmEffOpSize)
5967 {
5968 case IEMMODE_16BIT:
5969 IEM_MC_BEGIN(0, 1);
5970 IEM_MC_LOCAL(uint16_t, u16Value);
5971 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5972 IEM_MC_PUSH_U16(u16Value);
5973 IEM_MC_ADVANCE_RIP();
5974 IEM_MC_END();
5975 break;
5976
5977 case IEMMODE_32BIT:
5978 IEM_MC_BEGIN(0, 1);
5979 IEM_MC_LOCAL(uint32_t, u32Value);
5980 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
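            /* A dedicated push helper is used here because real CPUs may do
               only a 16-bit write for a doubleword segment-register push,
               leaving the upper half of the stack slot untouched;
               IEM_MC_PUSH_U32_SREG presumably models that quirk. */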
5981 IEM_MC_PUSH_U32_SREG(u32Value);
5982 IEM_MC_ADVANCE_RIP();
5983 IEM_MC_END();
5984 break;
5985
5986 case IEMMODE_64BIT:
5987 IEM_MC_BEGIN(0, 1);
5988 IEM_MC_LOCAL(uint64_t, u64Value);
5989 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5990 IEM_MC_PUSH_U64(u64Value);
5991 IEM_MC_ADVANCE_RIP();
5992 IEM_MC_END();
5993 break;
5994 }
5995
5996 return VINF_SUCCESS;
5997}
5998
5999
6000/** Opcode 0x0f 0xa0. */
6001FNIEMOP_DEF(iemOp_push_fs)
6002{
6003 IEMOP_MNEMONIC(push_fs, "push fs");
6004 IEMOP_HLP_MIN_386();
6005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6006 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6007}
6008
6009
6010/** Opcode 0x0f 0xa1. */
6011FNIEMOP_DEF(iemOp_pop_fs)
6012{
6013 IEMOP_MNEMONIC(pop_fs, "pop fs");
6014 IEMOP_HLP_MIN_386();
6015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6016 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6017}
6018
6019
6020/** Opcode 0x0f 0xa2. */
6021FNIEMOP_DEF(iemOp_cpuid)
6022{
6023 IEMOP_MNEMONIC(cpuid, "cpuid");
6024 IEMOP_HLP_MIN_486(); /* CPUID is not present on all 486s. */
6025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6026 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6027}
6028
6029
6030/**
6031 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6032 * iemOp_bts_Ev_Gv.
6033 */
6034FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6035{
6036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6037 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6038
6039 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6040 {
6041 /* register destination. */
6042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6043 switch (pVCpu->iem.s.enmEffOpSize)
6044 {
6045 case IEMMODE_16BIT:
6046 IEM_MC_BEGIN(3, 0);
6047 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6048 IEM_MC_ARG(uint16_t, u16Src, 1);
6049 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6050
6051 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
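                /* For register destinations the bit offset wraps modulo the
                   operand width, hence the masking below. */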
6052 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6053 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6054 IEM_MC_REF_EFLAGS(pEFlags);
6055 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6056
6057 IEM_MC_ADVANCE_RIP();
6058 IEM_MC_END();
6059 return VINF_SUCCESS;
6060
6061 case IEMMODE_32BIT:
6062 IEM_MC_BEGIN(3, 0);
6063 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6064 IEM_MC_ARG(uint32_t, u32Src, 1);
6065 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6066
6067 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6068 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6069 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6070 IEM_MC_REF_EFLAGS(pEFlags);
6071 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6072
6073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6074 IEM_MC_ADVANCE_RIP();
6075 IEM_MC_END();
6076 return VINF_SUCCESS;
6077
6078 case IEMMODE_64BIT:
6079 IEM_MC_BEGIN(3, 0);
6080 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6081 IEM_MC_ARG(uint64_t, u64Src, 1);
6082 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6083
6084 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6085 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6086 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6087 IEM_MC_REF_EFLAGS(pEFlags);
6088 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6089
6090 IEM_MC_ADVANCE_RIP();
6091 IEM_MC_END();
6092 return VINF_SUCCESS;
6093
6094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6095 }
6096 }
6097 else
6098 {
6099 /* memory destination. */
6100
6101 uint32_t fAccess;
6102 if (pImpl->pfnLockedU16)
6103 fAccess = IEM_ACCESS_DATA_RW;
6104 else /* BT */
6105 fAccess = IEM_ACCESS_DATA_R;
6106
6107 /** @todo test negative bit offsets! */
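        /* For memory destinations the bit offset is a signed quantity; the
           SAR/SHL pairs below turn it into a signed byte displacement that is
           added to the effective address, after which the offset is masked
           to stay within one operand-sized unit. */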
6108 switch (pVCpu->iem.s.enmEffOpSize)
6109 {
6110 case IEMMODE_16BIT:
6111 IEM_MC_BEGIN(3, 2);
6112 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6113 IEM_MC_ARG(uint16_t, u16Src, 1);
6114 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6116 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6117
6118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6119 if (pImpl->pfnLockedU16)
6120 IEMOP_HLP_DONE_DECODING();
6121 else
6122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6123 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6124 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6125 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6126 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6127 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6128 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6129 IEM_MC_FETCH_EFLAGS(EFlags);
6130
6131 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6132 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6133 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6134 else
6135 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6136 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6137
6138 IEM_MC_COMMIT_EFLAGS(EFlags);
6139 IEM_MC_ADVANCE_RIP();
6140 IEM_MC_END();
6141 return VINF_SUCCESS;
6142
6143 case IEMMODE_32BIT:
6144 IEM_MC_BEGIN(3, 2);
6145 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6146 IEM_MC_ARG(uint32_t, u32Src, 1);
6147 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6149 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6150
6151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6152 if (pImpl->pfnLockedU16)
6153 IEMOP_HLP_DONE_DECODING();
6154 else
6155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6156 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6157 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6158 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6159 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6160 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6161 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6162 IEM_MC_FETCH_EFLAGS(EFlags);
6163
6164 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6165 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6166 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6167 else
6168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6169 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6170
6171 IEM_MC_COMMIT_EFLAGS(EFlags);
6172 IEM_MC_ADVANCE_RIP();
6173 IEM_MC_END();
6174 return VINF_SUCCESS;
6175
6176 case IEMMODE_64BIT:
6177 IEM_MC_BEGIN(3, 2);
6178 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6179 IEM_MC_ARG(uint64_t, u64Src, 1);
6180 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6182 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6183
6184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6185 if (pImpl->pfnLockedU16)
6186 IEMOP_HLP_DONE_DECODING();
6187 else
6188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6189 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6190 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6191 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6192 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6193 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6194 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6195 IEM_MC_FETCH_EFLAGS(EFlags);
6196
6197 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6198 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6199 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6200 else
6201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6202 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6203
6204 IEM_MC_COMMIT_EFLAGS(EFlags);
6205 IEM_MC_ADVANCE_RIP();
6206 IEM_MC_END();
6207 return VINF_SUCCESS;
6208
6209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6210 }
6211 }
6212}
6213
6214
6215/** Opcode 0x0f 0xa3. */
6216FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6217{
6218 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6219 IEMOP_HLP_MIN_386();
6220 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6221}
6222
6223
6224/**
6225 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6226 */
6227FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6228{
6229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6230 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6231
6232 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6233 {
6234 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6236
6237 switch (pVCpu->iem.s.enmEffOpSize)
6238 {
6239 case IEMMODE_16BIT:
6240 IEM_MC_BEGIN(4, 0);
6241 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6242 IEM_MC_ARG(uint16_t, u16Src, 1);
6243 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6244 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6245
6246 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6247 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6248 IEM_MC_REF_EFLAGS(pEFlags);
6249 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6250
6251 IEM_MC_ADVANCE_RIP();
6252 IEM_MC_END();
6253 return VINF_SUCCESS;
6254
6255 case IEMMODE_32BIT:
6256 IEM_MC_BEGIN(4, 0);
6257 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6258 IEM_MC_ARG(uint32_t, u32Src, 1);
6259 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6260 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6261
6262 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6263 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6264 IEM_MC_REF_EFLAGS(pEFlags);
6265 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6266
6267 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6268 IEM_MC_ADVANCE_RIP();
6269 IEM_MC_END();
6270 return VINF_SUCCESS;
6271
6272 case IEMMODE_64BIT:
6273 IEM_MC_BEGIN(4, 0);
6274 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6275 IEM_MC_ARG(uint64_t, u64Src, 1);
6276 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6277 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6278
6279 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6280 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6281 IEM_MC_REF_EFLAGS(pEFlags);
6282 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6283
6284 IEM_MC_ADVANCE_RIP();
6285 IEM_MC_END();
6286 return VINF_SUCCESS;
6287
6288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6289 }
6290 }
6291 else
6292 {
6293 switch (pVCpu->iem.s.enmEffOpSize)
6294 {
6295 case IEMMODE_16BIT:
6296 IEM_MC_BEGIN(4, 2);
6297 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6298 IEM_MC_ARG(uint16_t, u16Src, 1);
6299 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6300 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6302
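                /* Note: the third parameter (1) to IEM_MC_CALC_RM_EFF_ADDR
                   reserves one trailing immediate byte, so RIP-relative
                   addressing in 64-bit mode is computed from the correct
                   instruction length. */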
6303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6304 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6305 IEM_MC_ASSIGN(cShiftArg, cShift);
6306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6307 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6308 IEM_MC_FETCH_EFLAGS(EFlags);
6309 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6310 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6311
6312 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6313 IEM_MC_COMMIT_EFLAGS(EFlags);
6314 IEM_MC_ADVANCE_RIP();
6315 IEM_MC_END();
6316 return VINF_SUCCESS;
6317
6318 case IEMMODE_32BIT:
6319 IEM_MC_BEGIN(4, 2);
6320 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6321 IEM_MC_ARG(uint32_t, u32Src, 1);
6322 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6323 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6325
6326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6327 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6328 IEM_MC_ASSIGN(cShiftArg, cShift);
6329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6330 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6331 IEM_MC_FETCH_EFLAGS(EFlags);
6332 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6333 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6334
6335 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6336 IEM_MC_COMMIT_EFLAGS(EFlags);
6337 IEM_MC_ADVANCE_RIP();
6338 IEM_MC_END();
6339 return VINF_SUCCESS;
6340
6341 case IEMMODE_64BIT:
6342 IEM_MC_BEGIN(4, 2);
6343 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6344 IEM_MC_ARG(uint64_t, u64Src, 1);
6345 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6346 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6348
6349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6350 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6351 IEM_MC_ASSIGN(cShiftArg, cShift);
6352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6353 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6354 IEM_MC_FETCH_EFLAGS(EFlags);
6355 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6356 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6357
6358 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6359 IEM_MC_COMMIT_EFLAGS(EFlags);
6360 IEM_MC_ADVANCE_RIP();
6361 IEM_MC_END();
6362 return VINF_SUCCESS;
6363
6364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6365 }
6366 }
6367}
6368
6369
6370/**
6371 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6372 */
6373FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6374{
6375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6376 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6377
6378 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6379 {
6380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6381
6382 switch (pVCpu->iem.s.enmEffOpSize)
6383 {
6384 case IEMMODE_16BIT:
6385 IEM_MC_BEGIN(4, 0);
6386 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6387 IEM_MC_ARG(uint16_t, u16Src, 1);
6388 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6389 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6390
6391 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6392 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
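                /* The shift count is taken from CL (the low byte of rCX). */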
6393 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6394 IEM_MC_REF_EFLAGS(pEFlags);
6395 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6396
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 case IEMMODE_32BIT:
6402 IEM_MC_BEGIN(4, 0);
6403 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6404 IEM_MC_ARG(uint32_t, u32Src, 1);
6405 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6406 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6407
6408 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6409 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6410 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6411 IEM_MC_REF_EFLAGS(pEFlags);
6412 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6413
6414 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6415 IEM_MC_ADVANCE_RIP();
6416 IEM_MC_END();
6417 return VINF_SUCCESS;
6418
6419 case IEMMODE_64BIT:
6420 IEM_MC_BEGIN(4, 0);
6421 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6422 IEM_MC_ARG(uint64_t, u64Src, 1);
6423 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6424 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6425
6426 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6427 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6428 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6429 IEM_MC_REF_EFLAGS(pEFlags);
6430 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6431
6432 IEM_MC_ADVANCE_RIP();
6433 IEM_MC_END();
6434 return VINF_SUCCESS;
6435
6436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6437 }
6438 }
6439 else
6440 {
6441 switch (pVCpu->iem.s.enmEffOpSize)
6442 {
6443 case IEMMODE_16BIT:
6444 IEM_MC_BEGIN(4, 2);
6445 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6446 IEM_MC_ARG(uint16_t, u16Src, 1);
6447 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6448 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6450
6451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6453 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6454 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6455 IEM_MC_FETCH_EFLAGS(EFlags);
6456 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6457 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6458
6459 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6460 IEM_MC_COMMIT_EFLAGS(EFlags);
6461 IEM_MC_ADVANCE_RIP();
6462 IEM_MC_END();
6463 return VINF_SUCCESS;
6464
6465 case IEMMODE_32BIT:
6466 IEM_MC_BEGIN(4, 2);
6467 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6468 IEM_MC_ARG(uint32_t, u32Src, 1);
6469 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6470 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6472
6473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6475 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6476 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6477 IEM_MC_FETCH_EFLAGS(EFlags);
6478 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6479 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6480
6481 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6482 IEM_MC_COMMIT_EFLAGS(EFlags);
6483 IEM_MC_ADVANCE_RIP();
6484 IEM_MC_END();
6485 return VINF_SUCCESS;
6486
6487 case IEMMODE_64BIT:
6488 IEM_MC_BEGIN(4, 2);
6489 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6490 IEM_MC_ARG(uint64_t, u64Src, 1);
6491 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6492 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6494
6495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6497 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6498 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6499 IEM_MC_FETCH_EFLAGS(EFlags);
6500 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6501 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6502
6503 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6504 IEM_MC_COMMIT_EFLAGS(EFlags);
6505 IEM_MC_ADVANCE_RIP();
6506 IEM_MC_END();
6507 return VINF_SUCCESS;
6508
6509 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6510 }
6511 }
6512}
6513
6514
6515
6516/** Opcode 0x0f 0xa4. */
6517FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6518{
6519 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6520 IEMOP_HLP_MIN_386();
6521 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6522}
6523
6524
6525/** Opcode 0x0f 0xa5. */
6526FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6527{
6528 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6529 IEMOP_HLP_MIN_386();
6530 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6531}
6532
6533
6534/** Opcode 0x0f 0xa8. */
6535FNIEMOP_DEF(iemOp_push_gs)
6536{
6537 IEMOP_MNEMONIC(push_gs, "push gs");
6538 IEMOP_HLP_MIN_386();
6539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6540 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6541}
6542
6543
6544/** Opcode 0x0f 0xa9. */
6545FNIEMOP_DEF(iemOp_pop_gs)
6546{
6547 IEMOP_MNEMONIC(pop_gs, "pop gs");
6548 IEMOP_HLP_MIN_386();
6549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6550 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6551}
6552
6553
6554/** Opcode 0x0f 0xaa. */
6555FNIEMOP_DEF(iemOp_rsm)
6556{
6557 IEMOP_MNEMONIC(rsm, "rsm");
6558 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6559 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6560 * intercept). */
6561 IEMOP_BITCH_ABOUT_STUB();
6562 return IEMOP_RAISE_INVALID_OPCODE();
6563}
6564
6565
6566
6567
6568/** Opcode 0x0f 0xab. */
6569FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6570{
6571 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6572 IEMOP_HLP_MIN_386();
6573 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6574}
6575
6576
6577/** Opcode 0x0f 0xac. */
6578FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6579{
6580 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6581 IEMOP_HLP_MIN_386();
6582 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6583}
6584
6585
6586/** Opcode 0x0f 0xad. */
6587FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6588{
6589 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6590 IEMOP_HLP_MIN_386();
6591 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6592}
6593
6594
6595/** Opcode 0x0f 0xae mem/0. */
6596FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6597{
6598 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6599 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6600 return IEMOP_RAISE_INVALID_OPCODE();
6601
6602 IEM_MC_BEGIN(3, 1);
6603 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6604 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6605 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
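    /* The effective operand size is passed along so the implementation can
       tell a REX.W FXSAVE64 apart from a plain FXSAVE. */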
6606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6608 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6609 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6610 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6611 IEM_MC_END();
6612 return VINF_SUCCESS;
6613}
6614
6615
6616/** Opcode 0x0f 0xae mem/1. */
6617FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6618{
6619 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6620 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6621 return IEMOP_RAISE_INVALID_OPCODE();
6622
6623 IEM_MC_BEGIN(3, 1);
6624 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6625 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6626 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6629 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6630 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6631 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6632 IEM_MC_END();
6633 return VINF_SUCCESS;
6634}
6635
6636
6637/**
6638 * @opmaps grp15
6639 * @opcode !11/2
6640 * @oppfx none
6641 * @opcpuid sse
6642 * @opgroup og_sse_mxcsrsm
6643 * @opxcpttype 5
6644 * @optest op1=0 -> mxcsr=0
6645 * @optest op1=0x2083 -> mxcsr=0x2083
6646 * @optest op1=0xfffffffe -> value.xcpt=0xd
6647 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6648 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6649 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6650 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6651 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6652 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6653 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6654 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6655 */
6656FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6657{
6658 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6659 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6660 return IEMOP_RAISE_INVALID_OPCODE();
6661
6662 IEM_MC_BEGIN(2, 0);
6663 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6664 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6667 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6668 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6669 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6670 IEM_MC_END();
6671 return VINF_SUCCESS;
6672}
6673
6674
6675/**
6676 * @opmaps grp15
6677 * @opcode !11/3
6678 * @oppfx none
6679 * @opcpuid sse
6680 * @opgroup og_sse_mxcsrsm
6681 * @opxcpttype 5
6682 * @optest mxcsr=0 -> op1=0
6683 * @optest mxcsr=0x2083 -> op1=0x2083
6684 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6685 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6686 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6687 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6688 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6689 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6690 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6691 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6692 */
6693FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6694{
6695 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6696 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6697 return IEMOP_RAISE_INVALID_OPCODE();
6698
6699 IEM_MC_BEGIN(2, 0);
6700 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6701 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6704 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6705 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6706 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6707 IEM_MC_END();
6708 return VINF_SUCCESS;
6709}
6710
6711
6712/**
6713 * @opmaps grp15
6714 * @opcode !11/4
6715 * @oppfx none
6716 * @opcpuid xsave
6717 * @opgroup og_system
6718 * @opxcpttype none
6719 */
6720FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6721{
6722 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6723 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6724 return IEMOP_RAISE_INVALID_OPCODE();
6725
6726 IEM_MC_BEGIN(3, 0);
6727 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6728 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6729 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6732 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6733 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6734 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6735 IEM_MC_END();
6736 return VINF_SUCCESS;
6737}
6738
6739
6740/**
6741 * @opmaps grp15
6742 * @opcode !11/5
6743 * @oppfx none
6744 * @opcpuid xsave
6745 * @opgroup og_system
6746 * @opxcpttype none
6747 */
6748FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6749{
6750 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6751 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6752 return IEMOP_RAISE_INVALID_OPCODE();
6753
6754 IEM_MC_BEGIN(3, 0);
6755 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6756 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6757 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6760 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the FPU state. */
6761 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6762 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6763 IEM_MC_END();
6764 return VINF_SUCCESS;
6765}
6766
6767/** Opcode 0x0f 0xae mem/6. */
6768FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6769
6770/**
6771 * @opmaps grp15
6772 * @opcode !11/7
6773 * @oppfx none
6774 * @opcpuid clfsh
6775 * @opgroup og_cachectl
6776 * @optest op1=1 ->
6777 */
6778FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6779{
6780 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6781 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6782 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6783
6784 IEM_MC_BEGIN(2, 0);
6785 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6786 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6789 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6790 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6791 IEM_MC_END();
6792 return VINF_SUCCESS;
6793}
6794
6795/**
6796 * @opmaps grp15
6797 * @opcode !11/7
6798 * @oppfx 0x66
6799 * @opcpuid clflushopt
6800 * @opgroup og_cachectl
6801 * @optest op1=1 ->
6802 */
6803FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6804{
6805 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6806 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6807 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6808
6809 IEM_MC_BEGIN(2, 0);
6810 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6811 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6814 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6815 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6816 IEM_MC_END();
6817 return VINF_SUCCESS;
6818}
6819
6820
6821/** Opcode 0x0f 0xae 11b/5. */
6822FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6823{
6824 RT_NOREF_PV(bRm);
6825 IEMOP_MNEMONIC(lfence, "lfence");
6826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6827 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6828 return IEMOP_RAISE_INVALID_OPCODE();
6829
6830 IEM_MC_BEGIN(0, 0);
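    /* If the host itself lacks SSE2 we cannot execute a real lfence;
       iemAImpl_alt_mem_fence is presumably a substitute (e.g. a locked
       memory operation) providing at least as strong ordering. */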
6831 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6832 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6833 else
6834 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6835 IEM_MC_ADVANCE_RIP();
6836 IEM_MC_END();
6837 return VINF_SUCCESS;
6838}
6839
6840
6841/** Opcode 0x0f 0xae 11b/6. */
6842FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6843{
6844 RT_NOREF_PV(bRm);
6845 IEMOP_MNEMONIC(mfence, "mfence");
6846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6847 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6848 return IEMOP_RAISE_INVALID_OPCODE();
6849
6850 IEM_MC_BEGIN(0, 0);
6851 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6852 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6853 else
6854 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6855 IEM_MC_ADVANCE_RIP();
6856 IEM_MC_END();
6857 return VINF_SUCCESS;
6858}
6859
6860
6861/** Opcode 0x0f 0xae 11b/7. */
6862FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6863{
6864 RT_NOREF_PV(bRm);
6865 IEMOP_MNEMONIC(sfence, "sfence");
6866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6867 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6868 return IEMOP_RAISE_INVALID_OPCODE();
6869
6870 IEM_MC_BEGIN(0, 0);
6871 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6872 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6873 else
6874 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6875 IEM_MC_ADVANCE_RIP();
6876 IEM_MC_END();
6877 return VINF_SUCCESS;
6878}
6879
6880
6881/** Opcode 0xf3 0x0f 0xae 11b/0. */
6882FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
6883{
6884 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
6885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
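    /* IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT below is expected to raise #UD unless
       we are in 64-bit mode with CR4.FSGSBASE set on a CPU reporting FSGSBASE
       support. */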
6886 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6887 {
6888 IEM_MC_BEGIN(1, 0);
6889 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6890 IEM_MC_ARG(uint64_t, u64Dst, 0);
6891 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
6892 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6893 IEM_MC_ADVANCE_RIP();
6894 IEM_MC_END();
6895 }
6896 else
6897 {
6898 IEM_MC_BEGIN(1, 0);
6899 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6900 IEM_MC_ARG(uint32_t, u32Dst, 0);
6901 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
6902 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6903 IEM_MC_ADVANCE_RIP();
6904 IEM_MC_END();
6905 }
6906 return VINF_SUCCESS;
6907}
6908
6909/** Opcode 0xf3 0x0f 0xae 11b/1. */
6910FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
6911{
6912 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
6913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6914 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6915 {
6916 IEM_MC_BEGIN(1, 0);
6917 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6918 IEM_MC_ARG(uint64_t, u64Dst, 0);
6919 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
6920 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6921 IEM_MC_ADVANCE_RIP();
6922 IEM_MC_END();
6923 }
6924 else
6925 {
6926 IEM_MC_BEGIN(1, 0);
6927 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6928 IEM_MC_ARG(uint32_t, u32Dst, 0);
6929 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
6930 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6931 IEM_MC_ADVANCE_RIP();
6932 IEM_MC_END();
6933 }
6934 return VINF_SUCCESS;
6935}
6936
6937/** Opcode 0xf3 0x0f 0xae 11b/2. */
6938FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
6939{
6940 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
6941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6942 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6943 {
6944 IEM_MC_BEGIN(1, 0);
6945 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6946 IEM_MC_ARG(uint64_t, u64Dst, 0);
6947 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6948 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6949 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
6950 IEM_MC_ADVANCE_RIP();
6951 IEM_MC_END();
6952 }
6953 else
6954 {
6955 IEM_MC_BEGIN(1, 0);
6956 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6957 IEM_MC_ARG(uint32_t, u32Dst, 0);
6958 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6959 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
6960 IEM_MC_ADVANCE_RIP();
6961 IEM_MC_END();
6962 }
6963 return VINF_SUCCESS;
6964}
6965
6966/** Opcode 0xf3 0x0f 0xae 11b/3. */
6967FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
6968{
6969 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
6970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6971 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6972 {
6973 IEM_MC_BEGIN(1, 0);
6974 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6975 IEM_MC_ARG(uint64_t, u64Dst, 0);
6976 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6977 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6978 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
6979 IEM_MC_ADVANCE_RIP();
6980 IEM_MC_END();
6981 }
6982 else
6983 {
6984 IEM_MC_BEGIN(1, 0);
6985 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6986 IEM_MC_ARG(uint32_t, u32Dst, 0);
6987 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6988 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
6989 IEM_MC_ADVANCE_RIP();
6990 IEM_MC_END();
6991 }
6992 return VINF_SUCCESS;
6993}
6994
6995
6996/**
6997 * Group 15 jump table for register variant.
6998 */
6999IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7000{ /* pfx: none, 066h, 0f3h, 0f2h */
7001 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7002 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7003 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7004 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7005 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7006 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7007 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7008 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7009};
7010AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7011
7012
7013/**
7014 * Group 15 jump table for memory variant.
7015 */
7016IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7017{ /* pfx: none, 066h, 0f3h, 0f2h */
7018 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7019 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7020 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7021 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7022 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7023 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7024 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7025 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7026};
7027AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7028
7029
7030/** Opcode 0x0f 0xae. */
7031FNIEMOP_DEF(iemOp_Grp15)
7032{
7033 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
7034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
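    /* The tables above are indexed by ModR/M reg field * 4 plus the
       mandatory-prefix index (none/66h/F3h/F2h). */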
7035 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7036 /* register, register */
7037 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7038 + pVCpu->iem.s.idxPrefix], bRm);
7039 /* memory, register */
7040 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7041 + pVCpu->iem.s.idxPrefix], bRm);
7042}
7043
7044
7045/** Opcode 0x0f 0xaf. */
7046FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7047{
7048 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7049 IEMOP_HLP_MIN_386();
7050 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7051 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7052}
7053
7054
7055/** Opcode 0x0f 0xb0. */
7056FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7057{
7058 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7059 IEMOP_HLP_MIN_486();
7060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7061
7062 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7063 {
7064 IEMOP_HLP_DONE_DECODING();
7065 IEM_MC_BEGIN(4, 0);
7066 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7067 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7068 IEM_MC_ARG(uint8_t, u8Src, 2);
7069 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7070
7071 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7072 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7073 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7074 IEM_MC_REF_EFLAGS(pEFlags);
7075 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7076 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7077 else
7078 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7079
7080 IEM_MC_ADVANCE_RIP();
7081 IEM_MC_END();
7082 }
7083 else
7084 {
7085 IEM_MC_BEGIN(4, 3);
7086 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7087 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7088 IEM_MC_ARG(uint8_t, u8Src, 2);
7089 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7091 IEM_MC_LOCAL(uint8_t, u8Al);
7092
7093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7094 IEMOP_HLP_DONE_DECODING();
7095 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7096 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7097 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7098 IEM_MC_FETCH_EFLAGS(EFlags);
7099 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7100 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7101 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7102 else
7103 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7104
7105 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7106 IEM_MC_COMMIT_EFLAGS(EFlags);
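        /* Write the comparand back unconditionally: on success u8Al still
           holds the original AL value, on failure the helper updated it with
           the destination operand. */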
7107 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7108 IEM_MC_ADVANCE_RIP();
7109 IEM_MC_END();
7110 }
7111 return VINF_SUCCESS;
7112}
7113
7114/** Opcode 0x0f 0xb1. */
7115FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7116{
7117 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7118 IEMOP_HLP_MIN_486();
7119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7120
7121 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7122 {
7123 IEMOP_HLP_DONE_DECODING();
7124 switch (pVCpu->iem.s.enmEffOpSize)
7125 {
7126 case IEMMODE_16BIT:
7127 IEM_MC_BEGIN(4, 0);
7128 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7129 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7130 IEM_MC_ARG(uint16_t, u16Src, 2);
7131 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7132
7133 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7134 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7135 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7136 IEM_MC_REF_EFLAGS(pEFlags);
7137 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7138 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7139 else
7140 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7141
7142 IEM_MC_ADVANCE_RIP();
7143 IEM_MC_END();
7144 return VINF_SUCCESS;
7145
7146 case IEMMODE_32BIT:
7147 IEM_MC_BEGIN(4, 0);
7148 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7149 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7150 IEM_MC_ARG(uint32_t, u32Src, 2);
7151 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7152
7153 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7154 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7155 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7156 IEM_MC_REF_EFLAGS(pEFlags);
7157 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7158 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7159 else
7160 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7161
7162 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7163 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7164 IEM_MC_ADVANCE_RIP();
7165 IEM_MC_END();
7166 return VINF_SUCCESS;
7167
7168 case IEMMODE_64BIT:
7169 IEM_MC_BEGIN(4, 0);
7170 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7171 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7172#ifdef RT_ARCH_X86
7173 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7174#else
7175 IEM_MC_ARG(uint64_t, u64Src, 2);
7176#endif
7177 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7178
7179 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7180 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7181 IEM_MC_REF_EFLAGS(pEFlags);
7182#ifdef RT_ARCH_X86
7183 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7185 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7186 else
7187 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7188#else
7189 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7190 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7191 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7192 else
7193 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7194#endif
7195
7196 IEM_MC_ADVANCE_RIP();
7197 IEM_MC_END();
7198 return VINF_SUCCESS;
7199
7200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7201 }
7202 }
7203 else
7204 {
7205 switch (pVCpu->iem.s.enmEffOpSize)
7206 {
7207 case IEMMODE_16BIT:
7208 IEM_MC_BEGIN(4, 3);
7209 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7210 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7211 IEM_MC_ARG(uint16_t, u16Src, 2);
7212 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7214 IEM_MC_LOCAL(uint16_t, u16Ax);
7215
7216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7217 IEMOP_HLP_DONE_DECODING();
7218 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7219 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7220 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7221 IEM_MC_FETCH_EFLAGS(EFlags);
7222 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7223 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7224 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7225 else
7226 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7227
7228 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7229 IEM_MC_COMMIT_EFLAGS(EFlags);
7230 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7231 IEM_MC_ADVANCE_RIP();
7232 IEM_MC_END();
7233 return VINF_SUCCESS;
7234
7235 case IEMMODE_32BIT:
7236 IEM_MC_BEGIN(4, 3);
7237 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7238 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7239 IEM_MC_ARG(uint32_t, u32Src, 2);
7240 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7242 IEM_MC_LOCAL(uint32_t, u32Eax);
7243
7244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7245 IEMOP_HLP_DONE_DECODING();
7246 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7247 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7248 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7249 IEM_MC_FETCH_EFLAGS(EFlags);
7250 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7251 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7252 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7253 else
7254 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7255
7256 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7257 IEM_MC_COMMIT_EFLAGS(EFlags);
7258 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7259 IEM_MC_ADVANCE_RIP();
7260 IEM_MC_END();
7261 return VINF_SUCCESS;
7262
7263 case IEMMODE_64BIT:
7264 IEM_MC_BEGIN(4, 3);
7265 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7266 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7267#ifdef RT_ARCH_X86
7268 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7269#else
7270 IEM_MC_ARG(uint64_t, u64Src, 2);
7271#endif
7272 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7274 IEM_MC_LOCAL(uint64_t, u64Rax);
7275
7276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7277 IEMOP_HLP_DONE_DECODING();
7278 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7279 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7280 IEM_MC_FETCH_EFLAGS(EFlags);
7281 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7282#ifdef RT_ARCH_X86
7283 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7284 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7285 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7286 else
7287 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7288#else
7289 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7290 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7291 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7292 else
7293 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7294#endif
7295
7296 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7297 IEM_MC_COMMIT_EFLAGS(EFlags);
7298 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7299 IEM_MC_ADVANCE_RIP();
7300 IEM_MC_END();
7301 return VINF_SUCCESS;
7302
7303 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7304 }
7305 }
7306}
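
/*
 * Illustrative sketch (hypothetical name): why the 32-bit register paths
 * above call IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF. In 64-bit mode, any 32-bit
 * GPR write implicitly zero-extends into the full 64-bit register.
 */
DECLINLINE(void) iemSketchStoreGRegU32(uint64_t *pu64Reg, uint32_t u32Value)
{
    *pu64Reg = u32Value; /* sketch: bits 63:32 become zero */
}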
7307
7308
7309FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7310{
7311 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7312 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7313
7314 switch (pVCpu->iem.s.enmEffOpSize)
7315 {
7316 case IEMMODE_16BIT:
7317 IEM_MC_BEGIN(5, 1);
7318 IEM_MC_ARG(uint16_t, uSel, 0);
7319 IEM_MC_ARG(uint16_t, offSeg, 1);
7320 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7321 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7322 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7323 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7326 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7327 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7328 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7329 IEM_MC_END();
7330 return VINF_SUCCESS;
7331
7332 case IEMMODE_32BIT:
7333 IEM_MC_BEGIN(5, 1);
7334 IEM_MC_ARG(uint16_t, uSel, 0);
7335 IEM_MC_ARG(uint32_t, offSeg, 1);
7336 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7337 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7338 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7339 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7342 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7343 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7344 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7345 IEM_MC_END();
7346 return VINF_SUCCESS;
7347
7348 case IEMMODE_64BIT:
7349 IEM_MC_BEGIN(5, 1);
7350 IEM_MC_ARG(uint16_t, uSel, 0);
7351 IEM_MC_ARG(uint64_t, offSeg, 1);
7352 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7353 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7354 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7355 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7358 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7359 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7360 else
7361 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7362 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7363 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7364 IEM_MC_END();
7365 return VINF_SUCCESS;
7366
7367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7368 }
7369}
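
/*
 * Illustrative sketch (hypothetical name, no segmentation or fault checks):
 * the far pointer layout decoded by the helper above, shown for the 32-bit
 * operand size. The offset comes first; the 16-bit selector follows at
 * displacement 2, 4 or 8 depending on the effective operand size.
 */
DECLINLINE(void) iemSketchReadFarPtr32(uint8_t const *pbMem, uint16_t *puSel, uint32_t *poffSeg)
{
    *poffSeg = RT_MAKE_U32_FROM_U8(pbMem[0], pbMem[1], pbMem[2], pbMem[3]); /* offset at +0 */
    *puSel   = RT_MAKE_U16(pbMem[4], pbMem[5]);                             /* selector at +4 */
}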
7370
7371
7372/** Opcode 0x0f 0xb2. */
7373FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7374{
7375 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7376 IEMOP_HLP_MIN_386();
7377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7378 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7379 return IEMOP_RAISE_INVALID_OPCODE();
7380 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7381}
7382
7383
7384/** Opcode 0x0f 0xb3. */
7385FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7386{
7387 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7388 IEMOP_HLP_MIN_386();
7389 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7390}
7391
7392
7393/** Opcode 0x0f 0xb4. */
7394FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7395{
7396 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7397 IEMOP_HLP_MIN_386();
7398 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7399 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7400 return IEMOP_RAISE_INVALID_OPCODE();
7401 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7402}
7403
7404
7405/** Opcode 0x0f 0xb5. */
7406FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7407{
7408 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7409 IEMOP_HLP_MIN_386();
7410 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7411 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7412 return IEMOP_RAISE_INVALID_OPCODE();
7413 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7414}
7415
7416
7417/** Opcode 0x0f 0xb6. */
7418FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7419{
7420 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7421 IEMOP_HLP_MIN_386();
7422
7423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7424
7425 /*
7426 * If rm is denoting a register, no more instruction bytes.
7427 */
7428 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7429 {
7430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7431 switch (pVCpu->iem.s.enmEffOpSize)
7432 {
7433 case IEMMODE_16BIT:
7434 IEM_MC_BEGIN(0, 1);
7435 IEM_MC_LOCAL(uint16_t, u16Value);
7436 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7437 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7438 IEM_MC_ADVANCE_RIP();
7439 IEM_MC_END();
7440 return VINF_SUCCESS;
7441
7442 case IEMMODE_32BIT:
7443 IEM_MC_BEGIN(0, 1);
7444 IEM_MC_LOCAL(uint32_t, u32Value);
7445 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7446 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7447 IEM_MC_ADVANCE_RIP();
7448 IEM_MC_END();
7449 return VINF_SUCCESS;
7450
7451 case IEMMODE_64BIT:
7452 IEM_MC_BEGIN(0, 1);
7453 IEM_MC_LOCAL(uint64_t, u64Value);
7454 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7455 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7456 IEM_MC_ADVANCE_RIP();
7457 IEM_MC_END();
7458 return VINF_SUCCESS;
7459
7460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7461 }
7462 }
7463 else
7464 {
7465 /*
7466 * We're loading a register from memory.
7467 */
7468 switch (pVCpu->iem.s.enmEffOpSize)
7469 {
7470 case IEMMODE_16BIT:
7471 IEM_MC_BEGIN(0, 2);
7472 IEM_MC_LOCAL(uint16_t, u16Value);
7473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7476 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7477 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7478 IEM_MC_ADVANCE_RIP();
7479 IEM_MC_END();
7480 return VINF_SUCCESS;
7481
7482 case IEMMODE_32BIT:
7483 IEM_MC_BEGIN(0, 2);
7484 IEM_MC_LOCAL(uint32_t, u32Value);
7485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7488 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7489 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7490 IEM_MC_ADVANCE_RIP();
7491 IEM_MC_END();
7492 return VINF_SUCCESS;
7493
7494 case IEMMODE_64BIT:
7495 IEM_MC_BEGIN(0, 2);
7496 IEM_MC_LOCAL(uint64_t, u64Value);
7497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7500 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7501 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7502 IEM_MC_ADVANCE_RIP();
7503 IEM_MC_END();
7504 return VINF_SUCCESS;
7505
7506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7507 }
7508 }
7509}
7510
7511
7512/** Opcode 0x0f 0xb7. */
7513FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7514{
7515 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7516 IEMOP_HLP_MIN_386();
7517
7518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7519
7520 /** @todo Not entirely sure how the operand size prefix is handled here,
7521 * assuming that it will be ignored. Would be nice to have a few
7522 * tests for this. */
7523 /*
7524 * If rm is denoting a register, no more instruction bytes.
7525 */
7526 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7527 {
7528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7529 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7530 {
7531 IEM_MC_BEGIN(0, 1);
7532 IEM_MC_LOCAL(uint32_t, u32Value);
7533 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7534 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7535 IEM_MC_ADVANCE_RIP();
7536 IEM_MC_END();
7537 }
7538 else
7539 {
7540 IEM_MC_BEGIN(0, 1);
7541 IEM_MC_LOCAL(uint64_t, u64Value);
7542 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7543 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7544 IEM_MC_ADVANCE_RIP();
7545 IEM_MC_END();
7546 }
7547 }
7548 else
7549 {
7550 /*
7551 * We're loading a register from memory.
7552 */
7553 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7554 {
7555 IEM_MC_BEGIN(0, 2);
7556 IEM_MC_LOCAL(uint32_t, u32Value);
7557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7560 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7561 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7562 IEM_MC_ADVANCE_RIP();
7563 IEM_MC_END();
7564 }
7565 else
7566 {
7567 IEM_MC_BEGIN(0, 2);
7568 IEM_MC_LOCAL(uint64_t, u64Value);
7569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7572 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7573 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7574 IEM_MC_ADVANCE_RIP();
7575 IEM_MC_END();
7576 }
7577 }
7578 return VINF_SUCCESS;
7579}
7580
7581
7582/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7583FNIEMOP_UD_STUB(iemOp_jmpe);
7584/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7585FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7586
7587
7588/**
7589 * @opcode 0xb9
7590 * @opinvalid intel-modrm
7591 * @optest ->
7592 */
7593FNIEMOP_DEF(iemOp_Grp10)
7594{
7595 /*
7596 * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the modr/m byte
7597 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7598 */
7599 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7600 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7601 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7602}
7603
7604
7605/** Opcode 0x0f 0xba. */
7606FNIEMOP_DEF(iemOp_Grp8)
7607{
7608 IEMOP_HLP_MIN_386();
7609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7610 PCIEMOPBINSIZES pImpl;
7611 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7612 {
7613 case 0: case 1: case 2: case 3:
7614 /* Both AMD and Intel want full modr/m decoding and imm8. */
7615 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7616 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7617 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7618 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7619 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7620 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7621 }
7622 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7623
7624 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7625 {
7626 /* register destination. */
7627 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7629
7630 switch (pVCpu->iem.s.enmEffOpSize)
7631 {
7632 case IEMMODE_16BIT:
7633 IEM_MC_BEGIN(3, 0);
7634 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7635 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7636 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7637
7638 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7639 IEM_MC_REF_EFLAGS(pEFlags);
7640 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7641
7642 IEM_MC_ADVANCE_RIP();
7643 IEM_MC_END();
7644 return VINF_SUCCESS;
7645
7646 case IEMMODE_32BIT:
7647 IEM_MC_BEGIN(3, 0);
7648 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7649 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7650 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7651
7652 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7653 IEM_MC_REF_EFLAGS(pEFlags);
7654 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7655
7656 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7657 IEM_MC_ADVANCE_RIP();
7658 IEM_MC_END();
7659 return VINF_SUCCESS;
7660
7661 case IEMMODE_64BIT:
7662 IEM_MC_BEGIN(3, 0);
7663 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7664 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7665 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7666
7667 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7668 IEM_MC_REF_EFLAGS(pEFlags);
7669 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7670
7671 IEM_MC_ADVANCE_RIP();
7672 IEM_MC_END();
7673 return VINF_SUCCESS;
7674
7675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7676 }
7677 }
7678 else
7679 {
7680 /* memory destination. */
7681
7682 uint32_t fAccess;
7683 if (pImpl->pfnLockedU16)
7684 fAccess = IEM_ACCESS_DATA_RW;
7685 else /* BT */
7686 fAccess = IEM_ACCESS_DATA_R;
7687
7688 /** @todo test negative bit offsets! */
7689 switch (pVCpu->iem.s.enmEffOpSize)
7690 {
7691 case IEMMODE_16BIT:
7692 IEM_MC_BEGIN(3, 1);
7693 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7694 IEM_MC_ARG(uint16_t, u16Src, 1);
7695 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7697
7698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7699 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7700 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7701 if (pImpl->pfnLockedU16)
7702 IEMOP_HLP_DONE_DECODING();
7703 else
7704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7705 IEM_MC_FETCH_EFLAGS(EFlags);
7706 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7707 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7708 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7709 else
7710 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7711 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7712
7713 IEM_MC_COMMIT_EFLAGS(EFlags);
7714 IEM_MC_ADVANCE_RIP();
7715 IEM_MC_END();
7716 return VINF_SUCCESS;
7717
7718 case IEMMODE_32BIT:
7719 IEM_MC_BEGIN(3, 1);
7720 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7721 IEM_MC_ARG(uint32_t, u32Src, 1);
7722 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7724
7725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7726 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7727 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7728 if (pImpl->pfnLockedU16)
7729 IEMOP_HLP_DONE_DECODING();
7730 else
7731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7732 IEM_MC_FETCH_EFLAGS(EFlags);
7733 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7734 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7735 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7736 else
7737 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7738 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7739
7740 IEM_MC_COMMIT_EFLAGS(EFlags);
7741 IEM_MC_ADVANCE_RIP();
7742 IEM_MC_END();
7743 return VINF_SUCCESS;
7744
7745 case IEMMODE_64BIT:
7746 IEM_MC_BEGIN(3, 1);
7747 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7748 IEM_MC_ARG(uint64_t, u64Src, 1);
7749 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7751
7752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7753 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7754 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7755 if (pImpl->pfnLockedU16)
7756 IEMOP_HLP_DONE_DECODING();
7757 else
7758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7759 IEM_MC_FETCH_EFLAGS(EFlags);
7760 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7761 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7762 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7763 else
7764 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7765 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7766
7767 IEM_MC_COMMIT_EFLAGS(EFlags);
7768 IEM_MC_ADVANCE_RIP();
7769 IEM_MC_END();
7770 return VINF_SUCCESS;
7771
7772 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7773 }
7774 }
7775}
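
/*
 * Illustrative sketch (hypothetical name): for the imm8 forms decoded above
 * the bit offset is masked to the operand width (0x0f, 0x1f or 0x3f), so it
 * can never address outside the operand itself; only the register-operand
 * forms of bt/bts/btr/btc can use negative offsets.
 */
DECLINLINE(bool) iemSketchBitTestU16(uint16_t u16Dst, uint8_t u8Bit)
{
    return RT_BOOL((u16Dst >> (u8Bit & 0x0f)) & 1); /* imm8 masked to 0..15 */
}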
7776
7777
7778/** Opcode 0x0f 0xbb. */
7779FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7780{
7781 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7782 IEMOP_HLP_MIN_386();
7783 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7784}
7785
7786
7787/** Opcode 0x0f 0xbc. */
7788FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7789{
7790 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7791 IEMOP_HLP_MIN_386();
7792 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7793 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7794}
7795
7796
7797/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7798FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7799
7800
7801/** Opcode 0x0f 0xbd. */
7802FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7803{
7804 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7805 IEMOP_HLP_MIN_386();
7806 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7807 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7808}
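
/*
 * Illustrative sketch (hypothetical name) of the scan performed by the
 * g_iemAImpl_bsf worker: find the lowest set bit. A zero source sets ZF and
 * leaves the destination alone here; architecturally the destination is
 * undefined in that case. bsr is the same idea scanning from the top.
 */
DECLINLINE(bool) iemSketchBsfU32(uint32_t u32Src, uint32_t *puIndex)
{
    if (!u32Src)
        return false;               /* ZF=1, destination untouched */
    uint32_t iBit = 0;
    while (!(u32Src & 1))
    {
        u32Src >>= 1;
        iBit++;
    }
    *puIndex = iBit;                /* ZF=0 */
    return true;
}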
7809
7810
7811/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7812FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7813
7814
7815/** Opcode 0x0f 0xbe. */
7816FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7817{
7818 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7819 IEMOP_HLP_MIN_386();
7820
7821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7822
7823 /*
7824 * If rm is denoting a register, no more instruction bytes.
7825 */
7826 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7827 {
7828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7829 switch (pVCpu->iem.s.enmEffOpSize)
7830 {
7831 case IEMMODE_16BIT:
7832 IEM_MC_BEGIN(0, 1);
7833 IEM_MC_LOCAL(uint16_t, u16Value);
7834 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7835 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7836 IEM_MC_ADVANCE_RIP();
7837 IEM_MC_END();
7838 return VINF_SUCCESS;
7839
7840 case IEMMODE_32BIT:
7841 IEM_MC_BEGIN(0, 1);
7842 IEM_MC_LOCAL(uint32_t, u32Value);
7843 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7844 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7845 IEM_MC_ADVANCE_RIP();
7846 IEM_MC_END();
7847 return VINF_SUCCESS;
7848
7849 case IEMMODE_64BIT:
7850 IEM_MC_BEGIN(0, 1);
7851 IEM_MC_LOCAL(uint64_t, u64Value);
7852 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7853 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7854 IEM_MC_ADVANCE_RIP();
7855 IEM_MC_END();
7856 return VINF_SUCCESS;
7857
7858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7859 }
7860 }
7861 else
7862 {
7863 /*
7864 * We're loading a register from memory.
7865 */
7866 switch (pVCpu->iem.s.enmEffOpSize)
7867 {
7868 case IEMMODE_16BIT:
7869 IEM_MC_BEGIN(0, 2);
7870 IEM_MC_LOCAL(uint16_t, u16Value);
7871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7874 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7875 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7876 IEM_MC_ADVANCE_RIP();
7877 IEM_MC_END();
7878 return VINF_SUCCESS;
7879
7880 case IEMMODE_32BIT:
7881 IEM_MC_BEGIN(0, 2);
7882 IEM_MC_LOCAL(uint32_t, u32Value);
7883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7886 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7887 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7888 IEM_MC_ADVANCE_RIP();
7889 IEM_MC_END();
7890 return VINF_SUCCESS;
7891
7892 case IEMMODE_64BIT:
7893 IEM_MC_BEGIN(0, 2);
7894 IEM_MC_LOCAL(uint64_t, u64Value);
7895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7898 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7899 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7900 IEM_MC_ADVANCE_RIP();
7901 IEM_MC_END();
7902 return VINF_SUCCESS;
7903
7904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7905 }
7906 }
7907}
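
/*
 * Illustrative sketch (hypothetical name): the sign extension done by the
 * movsx paths above, in plain C. Casting through the signed type replicates
 * the source sign bit into the upper destination bits; movzx is the same
 * shape with unsigned casts, which simply zero-fill.
 */
DECLINLINE(uint64_t) iemSketchSignExtendU8ToU64(uint8_t u8Value)
{
    return (uint64_t)(int64_t)(int8_t)u8Value;
}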
7908
7909
7910/** Opcode 0x0f 0xbf. */
7911FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7912{
7913 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7914 IEMOP_HLP_MIN_386();
7915
7916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7917
7918 /** @todo Not entirely sure how the operand size prefix is handled here,
7919 * assuming that it will be ignored. Would be nice to have a few
7920 * tests for this. */
7921 /*
7922 * If rm is denoting a register, no more instruction bytes.
7923 */
7924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7925 {
7926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7927 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7928 {
7929 IEM_MC_BEGIN(0, 1);
7930 IEM_MC_LOCAL(uint32_t, u32Value);
7931 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7932 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7933 IEM_MC_ADVANCE_RIP();
7934 IEM_MC_END();
7935 }
7936 else
7937 {
7938 IEM_MC_BEGIN(0, 1);
7939 IEM_MC_LOCAL(uint64_t, u64Value);
7940 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7941 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7942 IEM_MC_ADVANCE_RIP();
7943 IEM_MC_END();
7944 }
7945 }
7946 else
7947 {
7948 /*
7949 * We're loading a register from memory.
7950 */
7951 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7952 {
7953 IEM_MC_BEGIN(0, 2);
7954 IEM_MC_LOCAL(uint32_t, u32Value);
7955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7958 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7959 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7960 IEM_MC_ADVANCE_RIP();
7961 IEM_MC_END();
7962 }
7963 else
7964 {
7965 IEM_MC_BEGIN(0, 2);
7966 IEM_MC_LOCAL(uint64_t, u64Value);
7967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7970 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7971 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7972 IEM_MC_ADVANCE_RIP();
7973 IEM_MC_END();
7974 }
7975 }
7976 return VINF_SUCCESS;
7977}
7978
7979
7980/** Opcode 0x0f 0xc0. */
7981FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7982{
7983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7984 IEMOP_HLP_MIN_486();
7985 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7986
7987 /*
7988 * If rm is denoting a register, no more instruction bytes.
7989 */
7990 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7991 {
7992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7993
7994 IEM_MC_BEGIN(3, 0);
7995 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7996 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7997 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7998
7999 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8000 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8001 IEM_MC_REF_EFLAGS(pEFlags);
8002 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8003
8004 IEM_MC_ADVANCE_RIP();
8005 IEM_MC_END();
8006 }
8007 else
8008 {
8009 /*
8010 * We're accessing memory.
8011 */
8012 IEM_MC_BEGIN(3, 3);
8013 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8014 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8015 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8016 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8018
8019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8020 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8021 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8022 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8023 IEM_MC_FETCH_EFLAGS(EFlags);
8024 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8025 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8026 else
8027 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8028
8029 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8030 IEM_MC_COMMIT_EFLAGS(EFlags);
8031 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8032 IEM_MC_ADVANCE_RIP();
8033 IEM_MC_END();
8034 return VINF_SUCCESS;
8035 }
8036 return VINF_SUCCESS;
8037}
8038
8039
8040/** Opcode 0x0f 0xc1. */
8041FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8042{
8043 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8044 IEMOP_HLP_MIN_486();
8045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8046
8047 /*
8048 * If rm is denoting a register, no more instruction bytes.
8049 */
8050 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8051 {
8052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8053
8054 switch (pVCpu->iem.s.enmEffOpSize)
8055 {
8056 case IEMMODE_16BIT:
8057 IEM_MC_BEGIN(3, 0);
8058 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8059 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8060 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8061
8062 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8063 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8064 IEM_MC_REF_EFLAGS(pEFlags);
8065 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8066
8067 IEM_MC_ADVANCE_RIP();
8068 IEM_MC_END();
8069 return VINF_SUCCESS;
8070
8071 case IEMMODE_32BIT:
8072 IEM_MC_BEGIN(3, 0);
8073 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8074 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8075 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8076
8077 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8078 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8079 IEM_MC_REF_EFLAGS(pEFlags);
8080 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8081
8082 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8083 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8084 IEM_MC_ADVANCE_RIP();
8085 IEM_MC_END();
8086 return VINF_SUCCESS;
8087
8088 case IEMMODE_64BIT:
8089 IEM_MC_BEGIN(3, 0);
8090 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8091 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8092 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8093
8094 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8095 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8096 IEM_MC_REF_EFLAGS(pEFlags);
8097 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8098
8099 IEM_MC_ADVANCE_RIP();
8100 IEM_MC_END();
8101 return VINF_SUCCESS;
8102
8103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8104 }
8105 }
8106 else
8107 {
8108 /*
8109 * We're accessing memory.
8110 */
8111 switch (pVCpu->iem.s.enmEffOpSize)
8112 {
8113 case IEMMODE_16BIT:
8114 IEM_MC_BEGIN(3, 3);
8115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8116 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8117 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8118 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8120
8121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8122 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8123 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8124 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8125 IEM_MC_FETCH_EFLAGS(EFlags);
8126 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8127 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8128 else
8129 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8130
8131 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8132 IEM_MC_COMMIT_EFLAGS(EFlags);
8133 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8134 IEM_MC_ADVANCE_RIP();
8135 IEM_MC_END();
8136 return VINF_SUCCESS;
8137
8138 case IEMMODE_32BIT:
8139 IEM_MC_BEGIN(3, 3);
8140 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8141 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8142 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8143 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8145
8146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8147 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8148 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8149 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8150 IEM_MC_FETCH_EFLAGS(EFlags);
8151 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8152 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8153 else
8154 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8155
8156 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8157 IEM_MC_COMMIT_EFLAGS(EFlags);
8158 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8159 IEM_MC_ADVANCE_RIP();
8160 IEM_MC_END();
8161 return VINF_SUCCESS;
8162
8163 case IEMMODE_64BIT:
8164 IEM_MC_BEGIN(3, 3);
8165 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8166 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8167 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8168 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8170
8171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8172 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8173 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8174 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8175 IEM_MC_FETCH_EFLAGS(EFlags);
8176 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8177 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8178 else
8179 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8180
8181 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8182 IEM_MC_COMMIT_EFLAGS(EFlags);
8183 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8184 IEM_MC_ADVANCE_RIP();
8185 IEM_MC_END();
8186 return VINF_SUCCESS;
8187
8188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8189 }
8190 }
8191}
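
/*
 * Illustrative sketch (hypothetical name, flags omitted) of the exchange-
 * and-add performed by the iemAImpl_xadd_* workers used above: the
 * destination receives the sum while the register operand receives the old
 * destination value; EFLAGS are set as for an ADD.
 */
DECLINLINE(void) iemSketchXaddU32(uint32_t *pu32Dst, uint32_t *pu32Reg)
{
    uint32_t const u32OldDst = *pu32Dst;
    *pu32Dst = u32OldDst + *pu32Reg;    /* destination = old dst + reg */
    *pu32Reg = u32OldDst;               /* register    = old dst */
}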
8192
8193
8194/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8195FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8196/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8197FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8198/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8199FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8200/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8201FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8202
8203
8204/** Opcode 0x0f 0xc3. */
8205FNIEMOP_DEF(iemOp_movnti_My_Gy)
8206{
8207 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8208
8209 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8210
8211 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8212 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8213 {
8214 switch (pVCpu->iem.s.enmEffOpSize)
8215 {
8216 case IEMMODE_32BIT:
8217 IEM_MC_BEGIN(0, 2);
8218 IEM_MC_LOCAL(uint32_t, u32Value);
8219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8220
8221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8223 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8224 return IEMOP_RAISE_INVALID_OPCODE();
8225
8226 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8227 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8228 IEM_MC_ADVANCE_RIP();
8229 IEM_MC_END();
8230 break;
8231
8232 case IEMMODE_64BIT:
8233 IEM_MC_BEGIN(0, 2);
8234 IEM_MC_LOCAL(uint64_t, u64Value);
8235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8236
8237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8239 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8240 return IEMOP_RAISE_INVALID_OPCODE();
8241
8242 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8243 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8244 IEM_MC_ADVANCE_RIP();
8245 IEM_MC_END();
8246 break;
8247
8248 case IEMMODE_16BIT:
8249 /** @todo check this form. */
8250 return IEMOP_RAISE_INVALID_OPCODE();
8251 }
8252 }
8253 else
8254 return IEMOP_RAISE_INVALID_OPCODE();
8255 return VINF_SUCCESS;
8256}
8257/* Opcode 0x66 0x0f 0xc3 - invalid */
8258/* Opcode 0xf3 0x0f 0xc3 - invalid */
8259/* Opcode 0xf2 0x0f 0xc3 - invalid */
8260
8261/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8262FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8263/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8264FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8265/* Opcode 0xf3 0x0f 0xc4 - invalid */
8266/* Opcode 0xf2 0x0f 0xc4 - invalid */
8267
8268/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8269FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8270/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8271FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8272/* Opcode 0xf3 0x0f 0xc5 - invalid */
8273/* Opcode 0xf2 0x0f 0xc5 - invalid */
8274
8275/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8276FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8277/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8278FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8279/* Opcode 0xf3 0x0f 0xc6 - invalid */
8280/* Opcode 0xf2 0x0f 0xc6 - invalid */
8281
8282
8283/** Opcode 0x0f 0xc7 !11/1. */
8284FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8285{
8286 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8287
8288 IEM_MC_BEGIN(4, 3);
8289 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8290 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8291 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8292 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8293 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8294 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8296
8297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8298 IEMOP_HLP_DONE_DECODING();
8299 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8300
8301 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8302 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8303 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8304
8305 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8306 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8307 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8308
8309 IEM_MC_FETCH_EFLAGS(EFlags);
8310 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8311 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8312 else
8313 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8314
8315 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8316 IEM_MC_COMMIT_EFLAGS(EFlags);
8317 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8318 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8319 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8320 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8321 IEM_MC_ENDIF();
8322 IEM_MC_ADVANCE_RIP();
8323
8324 IEM_MC_END();
8325 return VINF_SUCCESS;
8326}
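
/*
 * Illustrative sketch (hypothetical name, no locking, ZF only) of the
 * comparison the cmpxchg8b workers above perform: EDX:EAX against the
 * 64-bit memory operand; on a match ECX:EBX is stored and ZF set, otherwise
 * the memory value is loaded back into EDX:EAX.
 */
DECLINLINE(void) iemSketchCmpXchg8b(uint64_t *pu64Mem, RTUINT64U *pu64EaxEdx, RTUINT64U const *pu64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64Mem == pu64EaxEdx->u)
    {
        *pu64Mem   = pu64EbxEcx->u;
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u = *pu64Mem;
        *pfEFlags    &= ~X86_EFL_ZF;
    }
}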
8327
8328
8329/** Opcode REX.W 0x0f 0xc7 !11/1. */
8330FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8331{
8332 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8333 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8334 {
8335#if 0
8336 RT_NOREF(bRm);
8337 IEMOP_BITCH_ABOUT_STUB();
8338 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8339#else
8340 IEM_MC_BEGIN(4, 3);
8341 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8342 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8343 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8344 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8345 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8346 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8348
8349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8350 IEMOP_HLP_DONE_DECODING();
8351 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8352 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8353
8354 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8355 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8356 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8357
8358 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8359 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8360 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8361
8362 IEM_MC_FETCH_EFLAGS(EFlags);
8363# ifdef RT_ARCH_AMD64
8364 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8365 {
8366 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8367 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8368 else
8369 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8370 }
8371 else
8372# endif
8373 {
8374 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
8375 accesses and is not atomic, which works fine in a uni-CPU guest
8376 configuration (ignoring DMA). If guest SMP is active we have no choice
8377 but to use a rendezvous callback here. Sigh. */
8378 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8379 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8380 else
8381 {
8382 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8383 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8384 }
8385 }
8386
8387 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8388 IEM_MC_COMMIT_EFLAGS(EFlags);
8389 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8390 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8391 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8392 IEM_MC_ENDIF();
8393 IEM_MC_ADVANCE_RIP();
8394
8395 IEM_MC_END();
8396 return VINF_SUCCESS;
8397#endif
8398 }
8399 Log(("cmpxchg16b -> #UD\n"));
8400 return IEMOP_RAISE_INVALID_OPCODE();
8401}
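
/*
 * Illustrative sketch (hypothetical name) of what the non-atomic fallback
 * mentioned above boils down to once a single vCPU is guaranteed: a 128-bit
 * compare and conditional store done as two 64-bit halves. The real
 * iemAImpl_cmpxchg16b_fallback also updates EFLAGS.
 */
DECLINLINE(bool) iemSketchCmpXchg16bFallback(RTUINT128U *pu128Mem, RTUINT128U *pu128RaxRdx, RTUINT128U const *pu128RbxRcx)
{
    if (   pu128Mem->s.Lo == pu128RaxRdx->s.Lo
        && pu128Mem->s.Hi == pu128RaxRdx->s.Hi)
    {
        pu128Mem->s.Lo = pu128RbxRcx->s.Lo;
        pu128Mem->s.Hi = pu128RbxRcx->s.Hi;
        return true;                    /* ZF would be set */
    }
    pu128RaxRdx->s.Lo = pu128Mem->s.Lo; /* mismatch: memory value goes to RDX:RAX */
    pu128RaxRdx->s.Hi = pu128Mem->s.Hi;
    return false;                       /* ZF would be cleared */
}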
8402
8403FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8404{
8405 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8406 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8407 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8408}
8409
8410/** Opcode 0x0f 0xc7 11/6. */
8411FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8412
8413/** Opcode 0x0f 0xc7 !11/6. */
8414FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8415
8416/** Opcode 0x66 0x0f 0xc7 !11/6. */
8417FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8418
8419/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8420FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8421
8422/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8423FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8424
8425/** Opcode 0x0f 0xc7 11/7. */
8426FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8427
8428
8429/**
8430 * Group 9 jump table for register variant.
8431 */
8432IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8433{ /* pfx: none, 066h, 0f3h, 0f2h */
8434 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8435 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8436 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8437 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8438 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8439 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8440 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8441 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8442};
8443AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8444
8445
8446/**
8447 * Group 9 jump table for memory variant.
8448 */
8449IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8450{ /* pfx: none, 066h, 0f3h, 0f2h */
8451 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8452 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8453 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8454 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8455 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8456 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8457 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8458 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8459};
8460AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8461
8462
8463/** Opcode 0x0f 0xc7. */
8464FNIEMOP_DEF(iemOp_Grp9)
8465{
8466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8467 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8468 /* register, register */
8469 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8470 + pVCpu->iem.s.idxPrefix], bRm);
8471 /* memory, register */
8472 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8473 + pVCpu->iem.s.idxPrefix], bRm);
8474}
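
/*
 * Illustrative sketch (hypothetical name) of the jump table indexing used
 * above: each /r value owns four consecutive slots, one per mandatory
 * prefix (none, 066h, 0f3h, 0f2h), giving reg * 4 + prefix.
 */
DECLINLINE(unsigned) iemSketchGroup9TableIndex(uint8_t bRm, unsigned idxPrefix)
{
    return ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + idxPrefix;
}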
8475
8476
8477/**
8478 * Common 'bswap register' helper.
8479 */
8480FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8481{
8482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8483 switch (pVCpu->iem.s.enmEffOpSize)
8484 {
8485 case IEMMODE_16BIT:
8486 IEM_MC_BEGIN(1, 0);
8487 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8488 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8489 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8490 IEM_MC_ADVANCE_RIP();
8491 IEM_MC_END();
8492 return VINF_SUCCESS;
8493
8494 case IEMMODE_32BIT:
8495 IEM_MC_BEGIN(1, 0);
8496 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8497 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8498 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8499 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8500 IEM_MC_ADVANCE_RIP();
8501 IEM_MC_END();
8502 return VINF_SUCCESS;
8503
8504 case IEMMODE_64BIT:
8505 IEM_MC_BEGIN(1, 0);
8506 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8507 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8508 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8509 IEM_MC_ADVANCE_RIP();
8510 IEM_MC_END();
8511 return VINF_SUCCESS;
8512
8513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8514 }
8515}
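
/*
 * Illustrative sketch (hypothetical name) of the 32-bit byte swap done by
 * iemAImpl_bswap_u32. The 16-bit form is deliberately left out: its result
 * is architecturally undefined, which is why the helper above references
 * the register as a 32-bit value without clearing the high dword.
 */
DECLINLINE(uint32_t) iemSketchBswapU32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}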
8516
8517
8518/** Opcode 0x0f 0xc8. */
8519FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8520{
8521 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8522 /* Note! Intel's manuals state that R8-R15 can be accessed by using a REX.X
8523 prefix. REX.B is the correct prefix, it appears. For a parallel
8524 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8525 IEMOP_HLP_MIN_486();
8526 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8527}
8528
8529
8530/** Opcode 0x0f 0xc9. */
8531FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8532{
8533 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8534 IEMOP_HLP_MIN_486();
8535 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8536}
8537
8538
8539/** Opcode 0x0f 0xca. */
8540FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8541{
8542 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8543 IEMOP_HLP_MIN_486();
8544 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8545}
8546
8547
8548/** Opcode 0x0f 0xcb. */
8549FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8550{
8551 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8552 IEMOP_HLP_MIN_486();
8553 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8554}
8555
8556
8557/** Opcode 0x0f 0xcc. */
8558FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8559{
8560 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8561 IEMOP_HLP_MIN_486();
8562 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8563}
8564
8565
8566/** Opcode 0x0f 0xcd. */
8567FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8568{
8569 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8570 IEMOP_HLP_MIN_486();
8571 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8572}
8573
8574
8575/** Opcode 0x0f 0xce. */
8576FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8577{
8578 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8579 IEMOP_HLP_MIN_486();
8580 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8581}
8582
8583
8584/** Opcode 0x0f 0xcf. */
8585FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8586{
8587 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8588 IEMOP_HLP_MIN_486();
8589 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8590}
8591
8592
8593/* Opcode 0x0f 0xd0 - invalid */
8594/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8595FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8596/* Opcode 0xf3 0x0f 0xd0 - invalid */
8597/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8598FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8599
8600/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8601FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8602/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8603FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8604/* Opcode 0xf3 0x0f 0xd1 - invalid */
8605/* Opcode 0xf2 0x0f 0xd1 - invalid */
8606
8607/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8608FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8609/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8610FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8611/* Opcode 0xf3 0x0f 0xd2 - invalid */
8612/* Opcode 0xf2 0x0f 0xd2 - invalid */
8613
8614/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8615FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8616/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8617FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8618/* Opcode 0xf3 0x0f 0xd3 - invalid */
8619/* Opcode 0xf2 0x0f 0xd3 - invalid */
8620
8621/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8622FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8623/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8624FNIEMOP_STUB(iemOp_paddq_Vx_W);
8625/* Opcode 0xf3 0x0f 0xd4 - invalid */
8626/* Opcode 0xf2 0x0f 0xd4 - invalid */
8627
8628/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8629FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8630/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8631FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8632/* Opcode 0xf3 0x0f 0xd5 - invalid */
8633/* Opcode 0xf2 0x0f 0xd5 - invalid */
8634
8635/* Opcode 0x0f 0xd6 - invalid */
8636
8637/**
8638 * @opcode 0xd6
8639 * @oppfx 0x66
8640 * @opcpuid sse2
8641 * @opgroup og_sse2_pcksclr_datamove
8642 * @opxcpttype none
8643 * @optest op1=-1 op2=2 -> op1=2
8644 * @optest op1=0 op2=-42 -> op1=-42
8645 */
8646FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8647{
8648 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8650 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8651 {
8652 /*
8653 * Register, register.
8654 */
8655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8656 IEM_MC_BEGIN(0, 2);
8657 IEM_MC_LOCAL(uint64_t, uSrc);
8658
8659 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8660 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8661
8662 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8663 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8664
8665 IEM_MC_ADVANCE_RIP();
8666 IEM_MC_END();
8667 }
8668 else
8669 {
8670 /*
8671 * Memory, register.
8672 */
8673 IEM_MC_BEGIN(0, 2);
8674 IEM_MC_LOCAL(uint64_t, uSrc);
8675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8676
8677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8679 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8680 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8681
8682 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8683 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8684
8685 IEM_MC_ADVANCE_RIP();
8686 IEM_MC_END();
8687 }
8688 return VINF_SUCCESS;
8689}
8690
8691
8692/**
8693 * @opcode 0xd6
8694 * @opcodesub 11 mr/reg
8695 * @oppfx f3
8696 * @opcpuid sse2
8697 * @opgroup og_sse2_simdint_datamove
8698 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8699 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8700 */
8701FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8702{
8703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8704 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8705 {
8706 /*
8707 * Register, register.
8708 */
8709 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8711 IEM_MC_BEGIN(0, 1);
8712 IEM_MC_LOCAL(uint64_t, uSrc);
8713
8714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8715 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8716
8717 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8718 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8719 IEM_MC_FPU_TO_MMX_MODE();
8720
8721 IEM_MC_ADVANCE_RIP();
8722 IEM_MC_END();
8723 return VINF_SUCCESS;
8724 }
8725
8726 /**
8727 * @opdone
8728 * @opmnemonic udf30fd6mem
8729 * @opcode 0xd6
8730 * @opcodesub !11 mr/reg
8731 * @oppfx f3
8732 * @opunused intel-modrm
8733 * @opcpuid sse
8734 * @optest ->
8735 */
8736 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8737}
8738
8739
8740/**
8741 * @opcode 0xd6
8742 * @opcodesub 11 mr/reg
8743 * @oppfx f2
8744 * @opcpuid sse2
8745 * @opgroup og_sse2_simdint_datamove
8746 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8747 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8748 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8749 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8750 * @optest op1=-42 op2=0xfedcba9876543210
8751 * -> op1=0xfedcba9876543210 ftw=0xff
8752 */
8753FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8754{
8755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8757 {
8758 /*
8759 * Register, register.
8760 */
8761 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8763 IEM_MC_BEGIN(0, 1);
8764 IEM_MC_LOCAL(uint64_t, uSrc);
8765
8766 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8767 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8768
8769 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8770 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8771 IEM_MC_FPU_TO_MMX_MODE();
8772
8773 IEM_MC_ADVANCE_RIP();
8774 IEM_MC_END();
8775 return VINF_SUCCESS;
8776 }
8777
8778 /**
8779 * @opdone
8780 * @opmnemonic udf20fd6mem
8781 * @opcode 0xd6
8782 * @opcodesub !11 mr/reg
8783 * @oppfx f2
8784 * @opunused intel-modrm
8785 * @opcpuid sse
8786 * @optest ->
8787 */
8788 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8789}
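
/*
 * Both register forms of 0xd6 with f3/f2 prefixes finish with
 * IEM_MC_FPU_TO_MMX_MODE: touching an MMX register switches the x87 unit
 * into MMX mode, tagging all eight registers valid (hence ftw=0xff in the
 * tests above) and clearing the top-of-stack field.  A minimal model of
 * that state change, assuming a reduced FPU state with just the two
 * affected fields (hypothetical type, illustrative only):
 */
typedef struct IEMSKETCHFPU { uint16_t FSW; uint8_t FTW; } IEMSKETCHFPU;

static void iemSketchFpuToMmxMode(IEMSKETCHFPU *pFpu)
{
    pFpu->FTW  = 0xff;                  /* all x87/MMX registers tagged valid */
    pFpu->FSW &= (uint16_t)~(7 << 11);  /* clear FSW.TOP (bits 13:11), so ST(0) == MM0 */
}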
8790
8791/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8792FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8793{
8794 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8795 /** @todo testcase: Check that the instruction implicitly clears the high
8796 * bits in 64-bit mode. REX.W first becomes necessary when VLMAX > 256
8797 * and opcode modifications are made to work with the whole width (not
8798 * just 128). */
8799 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8800 /* Docs say register only. */
8801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8803 {
8804 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8805 IEM_MC_BEGIN(2, 0);
8806 IEM_MC_ARG(uint64_t *, pDst, 0);
8807 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8808 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8809 IEM_MC_PREPARE_FPU_USAGE();
8810 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8811 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8812 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8813 IEM_MC_ADVANCE_RIP();
8814 IEM_MC_END();
8815 return VINF_SUCCESS;
8816 }
8817 return IEMOP_RAISE_INVALID_OPCODE();
8818}
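
/*
 * The iemAImpl_pmovmskb_u64 worker gathers the most significant bit of each
 * of the eight source bytes into bits 0..7 of the destination and zeroes
 * the rest.  A minimal C model of those semantics per the MMX/SSE
 * definition (illustrative sketch, not the actual assembly worker):
 */
static uint64_t iemSketchPMovMskBU64(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;  /* byte's bit 7 -> result bit iByte */
    return fMask;
}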
8819
8820/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8821FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8822{
8823 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8824 /** @todo testcase: Check that the instruction implicitly clears the high
8825 * bits in 64-bit mode. REX.W first becomes necessary when VLMAX > 256
8826 * and opcode modifications are made to work with the whole width (not
8827 * just 128). */
8828 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd, Ux");
8829 /* Docs say register only. */
8830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8831 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8832 {
8833 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8834 IEM_MC_BEGIN(2, 0);
8835 IEM_MC_ARG(uint64_t *, pDst, 0);
8836 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8837 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8838 IEM_MC_PREPARE_SSE_USAGE();
8839 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8840 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8841 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8842 IEM_MC_ADVANCE_RIP();
8843 IEM_MC_END();
8844 return VINF_SUCCESS;
8845 }
8846 return IEMOP_RAISE_INVALID_OPCODE();
8847}
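
/*
 * The 128-bit worker is the same byte-MSB gather over sixteen bytes, so the
 * result occupies bits 0..15.  A minimal self-contained model, taking the
 * XMM register as two 64-bit halves (illustrative sketch, not the actual
 * assembly worker):
 */
static uint32_t iemSketchPMovMskBU128(uint64_t uSrcLo, uint64_t uSrcHi)
{
    uint32_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        fMask |= (uint32_t)((uSrcLo >> (iByte * 8 + 7)) & 1) <<  iByte;      /* bytes 0..7  -> bits 0..7  */
        fMask |= (uint32_t)((uSrcHi >> (iByte * 8 + 7)) & 1) << (iByte + 8); /* bytes 8..15 -> bits 8..15 */
    }
    return fMask;
}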
8848
8849/* Opcode 0xf3 0x0f 0xd7 - invalid */
8850/* Opcode 0xf2 0x0f 0xd7 - invalid */
8851
8852
8853/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8854FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8855/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8856FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8857/* Opcode 0xf3 0x0f 0xd8 - invalid */
8858/* Opcode 0xf2 0x0f 0xd8 - invalid */
8859
8860/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8861FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8862/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8863FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8864/* Opcode 0xf3 0x0f 0xd9 - invalid */
8865/* Opcode 0xf2 0x0f 0xd9 - invalid */
8866
8867/** Opcode 0x0f 0xda - pminub Pq, Qq */
8868FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8869/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8870FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8871/* Opcode 0xf3 0x0f 0xda - invalid */
8872/* Opcode 0xf2 0x0f 0xda - invalid */
8873
8874/** Opcode 0x0f 0xdb - pand Pq, Qq */
8875FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8876/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8877FNIEMOP_STUB(iemOp_pand_Vx_W);
8878/* Opcode 0xf3 0x0f 0xdb - invalid */
8879/* Opcode 0xf2 0x0f 0xdb - invalid */
8880
8881/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8882FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8883/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8884FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8885/* Opcode 0xf3 0x0f 0xdc - invalid */
8886/* Opcode 0xf2 0x0f 0xdc - invalid */
8887
8888/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8889FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8890/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8891FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8892/* Opcode 0xf3 0x0f 0xdd - invalid */
8893/* Opcode 0xf2 0x0f 0xdd - invalid */
8894
8895/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8896FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8897/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8898FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8899/* Opcode 0xf3 0x0f 0xde - invalid */
8900/* Opcode 0xf2 0x0f 0xde - invalid */
8901
8902/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8903FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8904/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8905FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8906/* Opcode 0xf3 0x0f 0xdf - invalid */
8907/* Opcode 0xf2 0x0f 0xdf - invalid */
8908
8909/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8910FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8911/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8912FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8913/* Opcode 0xf3 0x0f 0xe0 - invalid */
8914/* Opcode 0xf2 0x0f 0xe0 - invalid */
8915
8916/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8917FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8918/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8919FNIEMOP_STUB(iemOp_psraw_Vx_W);
8920/* Opcode 0xf3 0x0f 0xe1 - invalid */
8921/* Opcode 0xf2 0x0f 0xe1 - invalid */
8922
8923/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8924FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8925/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8926FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8927/* Opcode 0xf3 0x0f 0xe2 - invalid */
8928/* Opcode 0xf2 0x0f 0xe2 - invalid */
8929
8930/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8931FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8932/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8933FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8934/* Opcode 0xf3 0x0f 0xe3 - invalid */
8935/* Opcode 0xf2 0x0f 0xe3 - invalid */
8936
8937/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8938FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8939/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8940FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8941/* Opcode 0xf3 0x0f 0xe4 - invalid */
8942/* Opcode 0xf2 0x0f 0xe4 - invalid */
8943
8944/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8945FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8946/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8947FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8948/* Opcode 0xf3 0x0f 0xe5 - invalid */
8949/* Opcode 0xf2 0x0f 0xe5 - invalid */
8950
8951/* Opcode 0x0f 0xe6 - invalid */
8952/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8953FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8954/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8955FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8956/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8957FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8958
8959
8960/**
8961 * @opcode 0xe7
8962 * @opcodesub !11 mr/reg
8963 * @oppfx none
8964 * @opcpuid sse
8965 * @opgroup og_sse1_cachect
8966 * @opxcpttype none
8967 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
8968 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8969 */
8970FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8971{
8972 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8974 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8975 {
8976 /* Register, memory. */
8977 IEM_MC_BEGIN(0, 2);
8978 IEM_MC_LOCAL(uint64_t, uSrc);
8979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8980
8981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8983 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8984 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8985
8986 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8987 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8988 IEM_MC_FPU_TO_MMX_MODE();
8989
8990 IEM_MC_ADVANCE_RIP();
8991 IEM_MC_END();
8992 return VINF_SUCCESS;
8993 }
8994 /**
8995 * @opdone
8996 * @opmnemonic ud0fe7reg
8997 * @opcode 0xe7
8998 * @opcodesub 11 mr/reg
8999 * @oppfx none
9000 * @opunused immediate
9001 * @opcpuid sse
9002 * @optest ->
9003 */
9004 return IEMOP_RAISE_INVALID_OPCODE();
9005}
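
/*
 * The non-temporal hint in movntq is a cache-allocation hint only; the
 * architecturally visible effect is an ordinary 64-bit store, which is what
 * the IEM_MC_STORE_MEM_U64 above performs.  A minimal model of the
 * guest-visible store (hypothetical flat-memory helper, illustrative only):
 */
static void iemSketchMovNtQStore(uint8_t *pbMem, uint64_t uSrc)
{
    for (unsigned iByte = 0; iByte < 8; iByte++)
        pbMem[iByte] = (uint8_t)(uSrc >> (iByte * 8)); /* little-endian byte order, plain store */
}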
9006
9007/**
9008 * @opcode 0xe7
9009 * @opcodesub !11 mr/reg
9010 * @oppfx 0x66
9011 * @opcpuid sse2
9012 * @opgroup og_sse2_cachect
9013 * @opxcpttype 1
9014 * @optest op1=-1 op2=2 -> op1=2
9015 * @optest op1=0 op2=-42 -> op1=-42
9016 */
9017FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9018{
9019 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9021 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9022 {
9023 /* Register, memory. */
9024 IEM_MC_BEGIN(0, 2);
9025 IEM_MC_LOCAL(RTUINT128U, uSrc);
9026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9027
9028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9030 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9032
9033 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9034 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9035
9036 IEM_MC_ADVANCE_RIP();
9037 IEM_MC_END();
9038 return VINF_SUCCESS;
9039 }
9040
9041 /**
9042 * @opdone
9043 * @opmnemonic ud660fe7reg
9044 * @opcode 0xe7
9045 * @opcodesub 11 mr/reg
9046 * @oppfx 0x66
9047 * @opunused immediate
9048 * @opcpuid sse
9049 * @optest ->
9050 */
9051 return IEMOP_RAISE_INVALID_OPCODE();
9052}
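
/*
 * Unlike movntq, movntdq requires a 16-byte aligned effective address:
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE raises #GP(0) instead of storing when the
 * operand is misaligned.  A minimal model of that alignment rule
 * (illustrative sketch, not the actual memory access path):
 */
static int iemSketchIsSse16ByteAligned(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* nonzero = OK to store; zero = raise #GP(0) */
}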
9053
9054/* Opcode 0xf3 0x0f 0xe7 - invalid */
9055/* Opcode 0xf2 0x0f 0xe7 - invalid */
9056
9057
9058/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9059FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9060/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9061FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9062/* Opcode 0xf3 0x0f 0xe8 - invalid */
9063/* Opcode 0xf2 0x0f 0xe8 - invalid */
9064
9065/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9066FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9067/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9068FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9069/* Opcode 0xf3 0x0f 0xe9 - invalid */
9070/* Opcode 0xf2 0x0f 0xe9 - invalid */
9071
9072/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9073FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9074/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9075FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9076/* Opcode 0xf3 0x0f 0xea - invalid */
9077/* Opcode 0xf2 0x0f 0xea - invalid */
9078
9079/** Opcode 0x0f 0xeb - por Pq, Qq */
9080FNIEMOP_STUB(iemOp_por_Pq_Qq);
9081/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9082FNIEMOP_STUB(iemOp_por_Vx_W);
9083/* Opcode 0xf3 0x0f 0xeb - invalid */
9084/* Opcode 0xf2 0x0f 0xeb - invalid */
9085
9086/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9087FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9088/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9089FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9090/* Opcode 0xf3 0x0f 0xec - invalid */
9091/* Opcode 0xf2 0x0f 0xec - invalid */
9092
9093/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9094FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9095/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9096FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9097/* Opcode 0xf3 0x0f 0xed - invalid */
9098/* Opcode 0xf2 0x0f 0xed - invalid */
9099
9100/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9101FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9102/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9103FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9104/* Opcode 0xf3 0x0f 0xee - invalid */
9105/* Opcode 0xf2 0x0f 0xee - invalid */
9106
9107
9108/** Opcode 0x0f 0xef - pxor Pq, Qq */
9109FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9110{
9111 IEMOP_MNEMONIC(pxor, "pxor");
9112 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9113}
9114
9115/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9116FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9117{
9118 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9119 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9120}
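
/*
 * Both pxor encodings defer to the common "full register op full register"
 * paths, with g_iemAImpl_pxor supplying the 64-bit and 128-bit workers.
 * The operation itself is a plain bitwise exclusive-or; a minimal model of
 * the 64-bit worker's semantics (illustrative sketch, not the actual
 * assembly worker):
 */
static void iemSketchPXorU64(uint64_t *puDst, uint64_t const *puSrc)
{
    *puDst ^= *puSrc; /* dst = dst XOR src over all 64 bits */
}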
9121
9122/* Opcode 0xf3 0x0f 0xef - invalid */
9123/* Opcode 0xf2 0x0f 0xef - invalid */
9124
9125/* Opcode 0x0f 0xf0 - invalid */
9126/* Opcode 0x66 0x0f 0xf0 - invalid */
9127/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9128FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9129
9130/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9131FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9132/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9133FNIEMOP_STUB(iemOp_psllw_Vx_W);
9134/* Opcode 0xf2 0x0f 0xf1 - invalid */
9135
9136/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9137FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9138/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9139FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9140/* Opcode 0xf2 0x0f 0xf2 - invalid */
9141
9142/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9143FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9144/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9145FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9146/* Opcode 0xf2 0x0f 0xf3 - invalid */
9147
9148/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9149FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9150/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9151FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9152/* Opcode 0xf2 0x0f 0xf4 - invalid */
9153
9154/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9155FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9156/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9157FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9158/* Opcode 0xf2 0x0f 0xf5 - invalid */
9159
9160/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9161FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9162/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9163FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9164/* Opcode 0xf2 0x0f 0xf6 - invalid */
9165
9166/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9167FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9168/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9169FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9170/* Opcode 0xf2 0x0f 0xf7 - invalid */
9171
9172/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9173FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9174/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9175FNIEMOP_STUB(iemOp_psubb_Vx_W);
9176/* Opcode 0xf2 0x0f 0xf8 - invalid */
9177
9178/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9179FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9180/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9181FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9182/* Opcode 0xf2 0x0f 0xf9 - invalid */
9183
9184/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9185FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9186/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9187FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9188/* Opcode 0xf2 0x0f 0xfa - invalid */
9189
9190/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9191FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9192/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9193FNIEMOP_STUB(iemOp_psubq_Vx_W);
9194/* Opcode 0xf2 0x0f 0xfb - invalid */
9195
9196/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9197FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9198/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9199FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9200/* Opcode 0xf2 0x0f 0xfc - invalid */
9201
9202/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9203FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9204/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9205FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9206/* Opcode 0xf2 0x0f 0xfd - invalid */
9207
9208/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9209FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9210/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9211FNIEMOP_STUB(iemOp_paddd_Vx_W);
9212/* Opcode 0xf2 0x0f 0xfe - invalid */
9213
9214
9215/** Opcode 0x0f 0xff - UD0 (any prefix) */
9216FNIEMOP_DEF(iemOp_ud0)
9217{
9218 IEMOP_MNEMONIC(ud0, "ud0");
9219 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9220 {
9221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9222#ifndef TST_IEM_CHECK_MC
9223 RTGCPTR GCPtrEff;
9224 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9225 if (rcStrict != VINF_SUCCESS)
9226 return rcStrict;
9227#endif
9228 IEMOP_HLP_DONE_DECODING();
9229 }
9230 return IEMOP_RAISE_INVALID_OPCODE();
9231}
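
/*
 * Intel CPUs decode a ModRM byte (plus any SIB/displacement bytes it
 * implies) for ud0 before raising #UD, which is why the code above fetches
 * bRm and runs the effective-address calculation for that vendor; other
 * vendors fault after just the two opcode bytes.  A minimal model of how
 * many extra bytes a ModRM byte implies with 32-bit addressing
 * (illustrative sketch; ignores the SIB base=5 disp32 special case):
 */
static unsigned iemSketchModRmExtraBytes32(uint8_t bRm)
{
    unsigned const bMod = bRm >> 6;
    unsigned       cb   = 0;
    if (bMod != 3 && (bRm & 7) == 4)
        cb += 1;        /* SIB byte follows */
    if (bMod == 1)
        cb += 1;        /* disp8 */
    else if (bMod == 2 || (bMod == 0 && (bRm & 7) == 5))
        cb += 4;        /* disp32 */
    return cb;
}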
9232
9233
9234
9235/**
9236 * Two byte opcode map, first byte 0x0f.
9237 *
9238 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9239 * check if it needs updating as well when making changes.
9240 */
9241IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9242{
9243 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9244 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9245 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9246 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9247 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9248 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9249 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9250 /* 0x06 */ IEMOP_X4(iemOp_clts),
9251 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9252 /* 0x08 */ IEMOP_X4(iemOp_invd),
9253 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9254 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9255 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9256 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9257 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9258 /* 0x0e */ IEMOP_X4(iemOp_femms),
9259 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9260
9261 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9262 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9263 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9264 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9265 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9266 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9267 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9268 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9269 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9270 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9271 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9272 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9273 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9274 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9275 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9276 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9277
9278 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9279 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9280 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9281 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9282 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9283 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9284 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9285 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9286 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9287 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9288 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9289 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9290 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9291 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9292 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9293 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9294
9295 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9296 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9297 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9298 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9299 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9300 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9301 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9302 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9303 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9304 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9305 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9306 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9307 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9308 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9309 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9310 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9311
9312 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9313 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9314 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9315 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9316 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9317 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9318 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9319 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9320 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9321 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9322 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9323 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9324 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9325 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9326 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9327 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9328
9329 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9330 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9331 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9332 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9333 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9334 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9335 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9336 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9337 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9338 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9339 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9340 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9341 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9342 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9343 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9344 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9345
9346 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9347 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9348 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9349 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9350 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9351 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9352 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9353 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9354 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9355 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9356 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9357 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9358 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9359 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9360 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9361 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9362
9363 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9364 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9365 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9366 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9367 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9368 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9369 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9370 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9371
9372 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9373 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9374 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9375 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9376 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9377 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9378 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9379 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9380
9381 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9382 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9383 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9384 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9385 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9386 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9387 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9388 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9389 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9390 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9391 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9392 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9393 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9394 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9395 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9396 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9397
9398 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9399 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9400 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9401 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9402 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9403 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9404 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9405 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9406 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9407 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9408 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9409 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9410 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9411 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9412 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9413 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9414
9415 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9416 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9417 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9418 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9419 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9420 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9421 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9422 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9423 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9424 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9425 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9426 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9427 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9428 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9429 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9430 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9431
9432 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9433 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9434 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9435 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9436 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9437 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9438 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9439 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9440 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9441 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9442 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9443 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9444 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9445 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9446 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9447 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9448
9449 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9450 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9451 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9452 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9453 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9454 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9455 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9456 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9457 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9458 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9459 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9460 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9461 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9462 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9463 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9464 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9465
9466 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9467 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9468 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9469 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9470 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9471 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9472 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9473 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9474 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9475 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9476 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9477 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9478 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9479 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9480 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9481 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9482
9483 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9484 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9485 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9486 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9487 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9488 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9489 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9490 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9491 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9492 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9493 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9494 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9495 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9496 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9497 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9498 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9499
9500 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9501 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9502 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9503 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9504 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9505 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9506 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9507 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9508 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9509 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9510 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9511 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9512 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9513 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9514 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9515 /* 0xff */ IEMOP_X4(iemOp_ud0),
9516};
9517AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
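
/*
 * The map is indexed with four entries per opcode byte, one per
 * mandatory-prefix column in the order of the header comment (none, 0x66,
 * 0xf3, 0xf2), hence the 256 * 4 == 1024 element count asserted above.  A
 * minimal model of the lookup (hypothetical helper; the real decoder keeps
 * the column index in pVCpu->iem.s):
 */
static PFNIEMOP iemSketchLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix)
{
    /* idxPrefix: 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 */
    return g_apfnTwoByteMap[(unsigned)bOpcode * 4 + idxPrefix];
}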
9518
9519/** @} */
9520