VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 71340

Last change on this file since 71340 was 71092, checked in by vboxsync, 7 years ago

VMM/IEM: Nested Hw.virt: Implement SVM decode-assist and NRIP feature.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 335.3 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 71092 2018-02-22 09:14:46Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    /*
     * SLDT - store the LDTR selector.  A register destination honours the
     * effective operand size; a memory destination is always 16-bit.
     */
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* #UD outside protected mode. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* SVM: nested hypervisor may intercept LDTR reads (decode-assist/NRIP aware). */
        IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store, regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    /*
     * STR - store the task register selector.  Mirrors SLDT above: register
     * destination honours effective operand size, memory is always 16-bit.
     */
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* #UD outside protected mode. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* SVM: nested hypervisor may intercept TR reads. */
        IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
145
146
/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    /*
     * LLDT - load the LDTR from a 16-bit selector.  The privilege check and
     * descriptor validation are done by iemCImpl_lldt.
     */
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* #UD outside protected mode. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: fetch the selector word, then hand off to the C impl. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
177
178
/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    /*
     * LTR - load the task register from a 16-bit selector.  Privilege and
     * descriptor checks are done by iemCImpl_ltr.
     */
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* #UD outside protected mode. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory source. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
209
210
211/** Opcode 0x0f 0x00 /3. */
212FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
213{
214 IEMOP_HLP_MIN_286();
215 IEMOP_HLP_NO_REAL_OR_V86_MODE();
216
217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
218 {
219 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
220 IEM_MC_BEGIN(2, 0);
221 IEM_MC_ARG(uint16_t, u16Sel, 0);
222 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
223 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
224 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
225 IEM_MC_END();
226 }
227 else
228 {
229 IEM_MC_BEGIN(2, 1);
230 IEM_MC_ARG(uint16_t, u16Sel, 0);
231 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
234 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
235 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
236 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
237 IEM_MC_END();
238 }
239 return VINF_SUCCESS;
240}
241
242
/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    /* VERR - verify segment readable; delegates to the common VERR/VERW worker. */
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    /* VERW - verify segment writable; delegates to the common VERR/VERW worker. */
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
259
260
/**
 * Group 6 jump table, indexed by the ModR/M reg field.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
275
/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    /* Fetch the ModR/M byte and dispatch on its reg field via the group 6 table. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
282
283
/** Opcode 0x0f 0x01 /0 (memory form). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    /* SGDT - store GDTR (limit + base) to memory; work done by iemCImpl_sgdt. */
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();          /* operand size is forced to 64-bit in long mode */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
300
301
/** Opcode 0x0f 0x01 0xc1 (/0, mod=3, rm=1) - VMCALL (VMX, not implemented). */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc2 (/0, mod=3, rm=2) - VMLAUNCH (VMX, not implemented). */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc3 (/0, mod=3, rm=3) - VMRESUME (VMX, not implemented). */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc4 (/0, mod=3, rm=4) - VMXOFF (VMX, not implemented). */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
332
333
/** Opcode 0x0f 0x01 /1 (memory form). */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    /* SIDT - store IDTR (limit + base) to memory; work done by iemCImpl_sidt. */
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();          /* operand size is forced to 64-bit in long mode */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
350
351
/** Opcode 0x0f 0x01 0xc8 (/1, mod=3, rm=0) - MONITOR. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    /* MONITOR uses DS:RAX (segment overridable), hence iEffSeg is passed along. */
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 0xc9 (/1, mod=3, rm=1) - MWAIT. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
368
369
/** Opcode 0x0f 0x01 /2 (memory form). */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    /* LGDT - load GDTR from memory.  The effective operand size decides how
       many base bytes iemCImpl_lgdt consumes. */
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();          /* operand size is forced to 64-bit in long mode */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
386
387
/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    /* XGETBV - #UD unless the guest CPU profile reports XSAVE/XRSTOR support. */
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    /* XSETBV - #UD unless XSAVE/XRSTOR is supported; CPL check is in the C impl. */
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
412
413
/** Opcode 0x0f 0x01 /3 (memory form). */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    /* LIDT - load IDTR from memory.  In long mode the operand size is always
       64-bit regardless of prefixes, hence the explicit override below. */
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
432
433
#ifdef VBOX_WITH_NESTED_HWVIRT
/*
 * AMD SVM instructions (0x0f 0x01 /3, mod=3).  All privilege, mode and
 * intercept checking is done by the respective iemCImpl_* workers; without
 * nested hardware virtualization these decode to #UD (see the #else below).
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/* Without nested hardware virtualization the SVM instructions raise #UD. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */
521
522/** Opcode 0x0f 0x01 /4. */
523FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
524{
525 IEMOP_MNEMONIC(smsw, "smsw");
526 IEMOP_HLP_MIN_286();
527 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
528 {
529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
530 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
531 switch (pVCpu->iem.s.enmEffOpSize)
532 {
533 case IEMMODE_16BIT:
534 IEM_MC_BEGIN(0, 1);
535 IEM_MC_LOCAL(uint16_t, u16Tmp);
536 IEM_MC_FETCH_CR0_U16(u16Tmp);
537 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
538 { /* likely */ }
539 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
540 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
541 else
542 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
543 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
544 IEM_MC_ADVANCE_RIP();
545 IEM_MC_END();
546 return VINF_SUCCESS;
547
548 case IEMMODE_32BIT:
549 IEM_MC_BEGIN(0, 1);
550 IEM_MC_LOCAL(uint32_t, u32Tmp);
551 IEM_MC_FETCH_CR0_U32(u32Tmp);
552 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
553 IEM_MC_ADVANCE_RIP();
554 IEM_MC_END();
555 return VINF_SUCCESS;
556
557 case IEMMODE_64BIT:
558 IEM_MC_BEGIN(0, 1);
559 IEM_MC_LOCAL(uint64_t, u64Tmp);
560 IEM_MC_FETCH_CR0_U64(u64Tmp);
561 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
562 IEM_MC_ADVANCE_RIP();
563 IEM_MC_END();
564 return VINF_SUCCESS;
565
566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
567 }
568 }
569 else
570 {
571 /* Ignore operand size here, memory refs are always 16-bit. */
572 IEM_MC_BEGIN(0, 2);
573 IEM_MC_LOCAL(uint16_t, u16Tmp);
574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
577 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
578 IEM_MC_FETCH_CR0_U16(u16Tmp);
579 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
580 { /* likely */ }
581 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
582 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
583 else
584 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
585 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
586 IEM_MC_ADVANCE_RIP();
587 IEM_MC_END();
588 return VINF_SUCCESS;
589 }
590}
591
592
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* LMSW - load machine status word into CR0.  The operand size is
       effectively ignored, all is 16-bit and only the lower 4 bits matter;
       masking and CPL checks happen in iemCImpl_lmsw. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
622
623
/** Opcode 0x0f 0x01 /7 (memory form). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /* INVLPG - invalidate the TLB entry for the given linear address.
       Privilege checking and the actual flush are in iemCImpl_invlpg. */
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
637
638
/** Opcode 0x0f 0x01 0xf8 (/7, mod=3, rm=0) - SWAPGS. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* SWAPGS - 64-bit mode only; swaps GS base with MSR_KERNEL_GS_BASE. */
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9 (/7, mod=3, rm=1) - RDTSCP. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
658
659
/**
 * Group 7 jump table, memory variant (mod != 3), indexed by the ModR/M reg field.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,        /* /0 */
    iemOp_Grp7_sidt,        /* /1 */
    iemOp_Grp7_lgdt,        /* /2 */
    iemOp_Grp7_lidt,        /* /3 */
    iemOp_Grp7_smsw,        /* /4 */
    iemOp_InvalidWithRM,    /* /5 */
    iemOp_Grp7_lmsw,        /* /6 */
    iemOp_Grp7_invlpg       /* /7 */
};
674
675
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher.  Memory forms go through the jump table; the mod=3
     * (register) encodings are individual instructions selected by reg:rm.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* VMX */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* MONITOR/MWAIT */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* XGETBV/XSETBV */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* AMD SVM - all eight rm values are assigned, so no fallthru. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW - register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* SWAPGS/RDTSCP */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
745
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    /*
     * Loads the access rights (LAR) or segment limit (LSL) of the selector in
     * Ew into Gv; the C impl sets ZF.  For 32/64-bit operand sizes the 64-bit
     * destination reference is used (the impl handles the width).
     */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* #UD outside protected mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the selector is a 16-bit read regardless of operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
839
840
841
/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    /* LAR - load access rights; delegates to the common LAR/LSL worker. */
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    /* LSL - load segment limit; delegates to the common LAR/LSL worker. */
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
856
857
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    /* SYSCALL - all checks and state changes are in iemCImpl_syscall. */
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    /* CLTS - clear CR0.TS; privilege check is in iemCImpl_clts. */
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    /* SYSRET - all checks and state changes are in iemCImpl_sysret. */
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
883
884
/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    /* INVD - only the nested-SVM intercept path is handled; the plain
       instruction is still a stub. */
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
900
901// IEMOP_HLP_MIN_486();
902
903
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* WBINVD - CPL 0 check and SVM intercept only; the cache write-back
       itself is a no-op for the VM. */
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
917
918
/** Opcode 0x0f 0x0b - the architecturally defined undefined opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
925
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we).
       Requires the 3DNow-prefetch CPUID bit; register forms are invalid. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the address for its side effects (faults), then do nothing. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
966
967
/** Opcode 0x0f 0x0e - FEMMS (3DNow!, faster EMMS). */
FNIEMOP_DEF(iemOp_femms)
{
    /* Leaves MMX mode: raises the usual FPU/device-not-available faults,
       then marks the FPU state as changed and exits MMX mode. */
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
983
984
/** Opcode 0x0f 0x0f - 3DNow! escape (sub-opcode is the trailing imm8). */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* #UD unless the guest CPU profile reports 3DNow! support. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
1003
1004
/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    /* MOVUPS xmm, xmm/m128 - unaligned 128-bit load/copy into the XMM register. */
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory (load: memory source, register destination).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1055
1056
/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    /* MOVUPD xmm, xmm/m128 - unaligned 128-bit load/copy (SSE2 variant). */
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory (load: memory source, register destination).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1106
1107
1108/**
1109 * @opcode 0x10
1110 * @oppfx 0xf3
1111 * @opcpuid sse
1112 * @opgroup og_sse_simdfp_datamove
1113 * @opxcpttype 5
1114 * @optest op1=1 op2=2 -> op1=2
1115 * @optest op1=0 op2=-22 -> op1=-22
1116 */
1117FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1118{
1119 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1120 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1121 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1122 {
1123 /*
1124 * Register, register.
1125 */
1126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1127 IEM_MC_BEGIN(0, 1);
1128 IEM_MC_LOCAL(uint32_t, uSrc);
1129
1130 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1131 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1132 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1133 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1134
1135 IEM_MC_ADVANCE_RIP();
1136 IEM_MC_END();
1137 }
1138 else
1139 {
1140 /*
1141 * Memory, register.
1142 */
1143 IEM_MC_BEGIN(0, 2);
1144 IEM_MC_LOCAL(uint32_t, uSrc);
1145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1146
1147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1149 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1150 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1151
1152 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1153 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1154
1155 IEM_MC_ADVANCE_RIP();
1156 IEM_MC_END();
1157 }
1158 return VINF_SUCCESS;
1159}
1160
1161
1162/**
1163 * @opcode 0x10
1164 * @oppfx 0xf2
1165 * @opcpuid sse2
1166 * @opgroup og_sse2_pcksclr_datamove
1167 * @opxcpttype 5
1168 * @optest op1=1 op2=2 -> op1=2
1169 * @optest op1=0 op2=-42 -> op1=-42
1170 */
1171FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1172{
1173 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1175 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1176 {
1177 /*
1178 * Register, register.
1179 */
1180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1181 IEM_MC_BEGIN(0, 1);
1182 IEM_MC_LOCAL(uint64_t, uSrc);
1183
1184 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1185 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1186 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1187 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1188
1189 IEM_MC_ADVANCE_RIP();
1190 IEM_MC_END();
1191 }
1192 else
1193 {
1194 /*
1195 * Memory, register.
1196 */
1197 IEM_MC_BEGIN(0, 2);
1198 IEM_MC_LOCAL(uint64_t, uSrc);
1199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1200
1201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1203 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1204 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1205
1206 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1207 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1208
1209 IEM_MC_ADVANCE_RIP();
1210 IEM_MC_END();
1211 }
1212 return VINF_SUCCESS;
1213}
1214
1215
1216/**
1217 * @opcode 0x11
1218 * @oppfx none
1219 * @opcpuid sse
1220 * @opgroup og_sse_simdfp_datamove
1221 * @opxcpttype 4UA
1222 * @optest op1=1 op2=2 -> op1=2
1223 * @optest op1=0 op2=-42 -> op1=-42
1224 */
1225FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1226{
1227 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1229 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1230 {
1231 /*
1232 * Register, register.
1233 */
1234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1235 IEM_MC_BEGIN(0, 0);
1236 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1237 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1238 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1239 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1240 IEM_MC_ADVANCE_RIP();
1241 IEM_MC_END();
1242 }
1243 else
1244 {
1245 /*
1246 * Memory, register.
1247 */
1248 IEM_MC_BEGIN(0, 2);
1249 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1251
1252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1254 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1255 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1256
1257 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1258 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1259
1260 IEM_MC_ADVANCE_RIP();
1261 IEM_MC_END();
1262 }
1263 return VINF_SUCCESS;
1264}
1265
1266
1267/**
1268 * @opcode 0x11
1269 * @oppfx 0x66
1270 * @opcpuid sse2
1271 * @opgroup og_sse2_pcksclr_datamove
1272 * @opxcpttype 4UA
1273 * @optest op1=1 op2=2 -> op1=2
1274 * @optest op1=0 op2=-42 -> op1=-42
1275 */
1276FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1277{
1278 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1280 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1281 {
1282 /*
1283 * Register, register.
1284 */
1285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1286 IEM_MC_BEGIN(0, 0);
1287 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1288 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1289 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1290 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1291 IEM_MC_ADVANCE_RIP();
1292 IEM_MC_END();
1293 }
1294 else
1295 {
1296 /*
1297 * Memory, register.
1298 */
1299 IEM_MC_BEGIN(0, 2);
1300 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1302
1303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1305 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1306 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1307
1308 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1309 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1310
1311 IEM_MC_ADVANCE_RIP();
1312 IEM_MC_END();
1313 }
1314 return VINF_SUCCESS;
1315}
1316
1317
1318/**
1319 * @opcode 0x11
1320 * @oppfx 0xf3
1321 * @opcpuid sse
1322 * @opgroup og_sse_simdfp_datamove
1323 * @opxcpttype 5
1324 * @optest op1=1 op2=2 -> op1=2
1325 * @optest op1=0 op2=-22 -> op1=-22
1326 */
1327FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1328{
1329 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1332 {
1333 /*
1334 * Register, register.
1335 */
1336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1337 IEM_MC_BEGIN(0, 1);
1338 IEM_MC_LOCAL(uint32_t, uSrc);
1339
1340 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1341 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1342 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1343 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1344
1345 IEM_MC_ADVANCE_RIP();
1346 IEM_MC_END();
1347 }
1348 else
1349 {
1350 /*
1351 * Memory, register.
1352 */
1353 IEM_MC_BEGIN(0, 2);
1354 IEM_MC_LOCAL(uint32_t, uSrc);
1355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1356
1357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1359 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1360 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1361
1362 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1363 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1364
1365 IEM_MC_ADVANCE_RIP();
1366 IEM_MC_END();
1367 }
1368 return VINF_SUCCESS;
1369}
1370
1371
1372/**
1373 * @opcode 0x11
1374 * @oppfx 0xf2
1375 * @opcpuid sse2
1376 * @opgroup og_sse2_pcksclr_datamove
1377 * @opxcpttype 5
1378 * @optest op1=1 op2=2 -> op1=2
1379 * @optest op1=0 op2=-42 -> op1=-42
1380 */
1381FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1382{
1383 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1385 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1386 {
1387 /*
1388 * Register, register.
1389 */
1390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1391 IEM_MC_BEGIN(0, 1);
1392 IEM_MC_LOCAL(uint64_t, uSrc);
1393
1394 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1396 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1397 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1398
1399 IEM_MC_ADVANCE_RIP();
1400 IEM_MC_END();
1401 }
1402 else
1403 {
1404 /*
1405 * Memory, register.
1406 */
1407 IEM_MC_BEGIN(0, 2);
1408 IEM_MC_LOCAL(uint64_t, uSrc);
1409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1410
1411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1413 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1415
1416 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1417 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1418
1419 IEM_MC_ADVANCE_RIP();
1420 IEM_MC_END();
1421 }
1422 return VINF_SUCCESS;
1423}
1424
1425
/**
 * Handles opcode 0x0f 0x12 without a prefix: MOVHLPS for the register form
 * (11 mr/reg) and MOVLPS for the memory form.  Details are in the embedded
 * doxygen blocks below.
 */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MOVHLPS: high qword of the source register -> low qword of the destination. */
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* MOVLPS: memory qword -> low qword of the destination register. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1488
1489
1490/**
1491 * @opcode 0x12
1492 * @opcodesub !11 mr/reg
1493 * @oppfx 0x66
1494 * @opcpuid sse2
1495 * @opgroup og_sse2_pcksclr_datamove
1496 * @opxcpttype 5
1497 * @optest op1=1 op2=2 -> op1=2
1498 * @optest op1=0 op2=-42 -> op1=-42
1499 */
1500FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1501{
1502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1503 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1504 {
1505 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1506
1507 IEM_MC_BEGIN(0, 2);
1508 IEM_MC_LOCAL(uint64_t, uSrc);
1509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1510
1511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1513 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1514 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1515
1516 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1517 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1518
1519 IEM_MC_ADVANCE_RIP();
1520 IEM_MC_END();
1521 return VINF_SUCCESS;
1522 }
1523
1524 /**
1525 * @opdone
1526 * @opmnemonic ud660f12m3
1527 * @opcode 0x12
1528 * @opcodesub 11 mr/reg
1529 * @oppfx 0x66
1530 * @opunused immediate
1531 * @opcpuid sse
1532 * @optest ->
1533 */
1534 return IEMOP_RAISE_INVALID_OPCODE();
1535}
1536
1537
1538/**
1539 * @opcode 0x12
1540 * @oppfx 0xf3
1541 * @opcpuid sse3
1542 * @opgroup og_sse3_pcksclr_datamove
1543 * @opxcpttype 4
1544 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1545 * op1=0x00000002000000020000000100000001
1546 */
1547FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1548{
1549 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1551 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1552 {
1553 /*
1554 * Register, register.
1555 */
1556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1557 IEM_MC_BEGIN(2, 0);
1558 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1559 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1560
1561 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1562 IEM_MC_PREPARE_SSE_USAGE();
1563
1564 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1565 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1566 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1567
1568 IEM_MC_ADVANCE_RIP();
1569 IEM_MC_END();
1570 }
1571 else
1572 {
1573 /*
1574 * Register, memory.
1575 */
1576 IEM_MC_BEGIN(2, 2);
1577 IEM_MC_LOCAL(RTUINT128U, uSrc);
1578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1579 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1580 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1581
1582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1584 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1585 IEM_MC_PREPARE_SSE_USAGE();
1586
1587 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1588 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1589 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1590
1591 IEM_MC_ADVANCE_RIP();
1592 IEM_MC_END();
1593 }
1594 return VINF_SUCCESS;
1595}
1596
1597
1598/**
1599 * @opcode 0x12
1600 * @oppfx 0xf2
1601 * @opcpuid sse3
1602 * @opgroup og_sse3_pcksclr_datamove
1603 * @opxcpttype 5
1604 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1605 * op1=0x22222222111111112222222211111111
1606 */
1607FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1608{
1609 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1611 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1612 {
1613 /*
1614 * Register, register.
1615 */
1616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1617 IEM_MC_BEGIN(2, 0);
1618 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1619 IEM_MC_ARG(uint64_t, uSrc, 1);
1620
1621 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1622 IEM_MC_PREPARE_SSE_USAGE();
1623
1624 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1625 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1626 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1627
1628 IEM_MC_ADVANCE_RIP();
1629 IEM_MC_END();
1630 }
1631 else
1632 {
1633 /*
1634 * Register, memory.
1635 */
1636 IEM_MC_BEGIN(2, 2);
1637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1638 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1639 IEM_MC_ARG(uint64_t, uSrc, 1);
1640
1641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1643 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1644 IEM_MC_PREPARE_SSE_USAGE();
1645
1646 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1647 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1648 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1649
1650 IEM_MC_ADVANCE_RIP();
1651 IEM_MC_END();
1652 }
1653 return VINF_SUCCESS;
1654}
1655
1656
1657/**
1658 * @opcode 0x13
1659 * @opcodesub !11 mr/reg
1660 * @oppfx none
1661 * @opcpuid sse
1662 * @opgroup og_sse_simdfp_datamove
1663 * @opxcpttype 5
1664 * @optest op1=1 op2=2 -> op1=2
1665 * @optest op1=0 op2=-42 -> op1=-42
1666 */
1667FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1668{
1669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1670 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1671 {
1672 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1673
1674 IEM_MC_BEGIN(0, 2);
1675 IEM_MC_LOCAL(uint64_t, uSrc);
1676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1677
1678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1681 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1682
1683 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1684 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1685
1686 IEM_MC_ADVANCE_RIP();
1687 IEM_MC_END();
1688 return VINF_SUCCESS;
1689 }
1690
1691 /**
1692 * @opdone
1693 * @opmnemonic ud0f13m3
1694 * @opcode 0x13
1695 * @opcodesub 11 mr/reg
1696 * @oppfx none
1697 * @opunused immediate
1698 * @opcpuid sse
1699 * @optest ->
1700 */
1701 return IEMOP_RAISE_INVALID_OPCODE();
1702}
1703
1704
1705/**
1706 * @opcode 0x13
1707 * @opcodesub !11 mr/reg
1708 * @oppfx 0x66
1709 * @opcpuid sse2
1710 * @opgroup og_sse2_pcksclr_datamove
1711 * @opxcpttype 5
1712 * @optest op1=1 op2=2 -> op1=2
1713 * @optest op1=0 op2=-42 -> op1=-42
1714 */
1715FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1716{
1717 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1718 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1719 {
1720 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1721 IEM_MC_BEGIN(0, 2);
1722 IEM_MC_LOCAL(uint64_t, uSrc);
1723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1724
1725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1727 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1728 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1729
1730 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1731 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1732
1733 IEM_MC_ADVANCE_RIP();
1734 IEM_MC_END();
1735 return VINF_SUCCESS;
1736 }
1737
1738 /**
1739 * @opdone
1740 * @opmnemonic ud660f13m3
1741 * @opcode 0x13
1742 * @opcodesub 11 mr/reg
1743 * @oppfx 0x66
1744 * @opunused immediate
1745 * @opcpuid sse
1746 * @optest ->
1747 */
1748 return IEMOP_RAISE_INVALID_OPCODE();
1749}
1750
1751
1752/**
1753 * @opmnemonic udf30f13
1754 * @opcode 0x13
1755 * @oppfx 0xf3
1756 * @opunused intel-modrm
1757 * @opcpuid sse
1758 * @optest ->
1759 * @opdone
1760 */
1761
1762/**
1763 * @opmnemonic udf20f13
1764 * @opcode 0x13
1765 * @oppfx 0xf2
1766 * @opunused intel-modrm
1767 * @opcpuid sse
1768 * @optest ->
1769 * @opdone
1770 */
1771
/** Opcode 0x0f 0x14 - unpcklps Vx, Wx (stub, not yet implemented). */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx (stub, not yet implemented). */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1776
1777/**
1778 * @opdone
1779 * @opmnemonic udf30f14
1780 * @opcode 0x14
1781 * @oppfx 0xf3
1782 * @opunused intel-modrm
1783 * @opcpuid sse
1784 * @optest ->
1785 * @opdone
1786 */
1787
1788/**
1789 * @opmnemonic udf20f14
1790 * @opcode 0x14
1791 * @oppfx 0xf2
1792 * @opunused intel-modrm
1793 * @opcpuid sse
1794 * @optest ->
1795 * @opdone
1796 */
1797
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx (stub, not yet implemented). */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx (stub, not yet implemented). */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1802/* Opcode 0xf3 0x0f 0x15 - invalid */
1803/* Opcode 0xf2 0x0f 0x15 - invalid */
1804
1805/**
1806 * @opdone
1807 * @opmnemonic udf30f15
1808 * @opcode 0x15
1809 * @oppfx 0xf3
1810 * @opunused intel-modrm
1811 * @opcpuid sse
1812 * @optest ->
1813 * @opdone
1814 */
1815
1816/**
1817 * @opmnemonic udf20f15
1818 * @opcode 0x15
1819 * @oppfx 0xf2
1820 * @opunused intel-modrm
1821 * @opcpuid sse
1822 * @optest ->
1823 * @opdone
1824 */
1825
/**
 * Handles opcode 0x0f 0x16 without a prefix: MOVLHPS for the register form
 * (11 mr/reg) and MOVHPS for the memory form.  Details are in the embedded
 * doxygen blocks below.
 */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MOVLHPS: low qword of the source register -> high qword of the destination. */
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* MOVHPS: memory qword -> high qword of the destination register. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1888
1889
1890/**
1891 * @opcode 0x16
1892 * @opcodesub !11 mr/reg
1893 * @oppfx 0x66
1894 * @opcpuid sse2
1895 * @opgroup og_sse2_pcksclr_datamove
1896 * @opxcpttype 5
1897 * @optest op1=1 op2=2 -> op1=2
1898 * @optest op1=0 op2=-42 -> op1=-42
1899 */
1900FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1901{
1902 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1903 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1904 {
1905 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1906 IEM_MC_BEGIN(0, 2);
1907 IEM_MC_LOCAL(uint64_t, uSrc);
1908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1909
1910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1912 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1913 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1914
1915 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1916 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1917
1918 IEM_MC_ADVANCE_RIP();
1919 IEM_MC_END();
1920 return VINF_SUCCESS;
1921 }
1922
1923 /**
1924 * @opdone
1925 * @opmnemonic ud660f16m3
1926 * @opcode 0x16
1927 * @opcodesub 11 mr/reg
1928 * @oppfx 0x66
1929 * @opunused immediate
1930 * @opcpuid sse
1931 * @optest ->
1932 */
1933 return IEMOP_RAISE_INVALID_OPCODE();
1934}
1935
1936
1937/**
1938 * @opcode 0x16
1939 * @oppfx 0xf3
1940 * @opcpuid sse3
1941 * @opgroup og_sse3_pcksclr_datamove
1942 * @opxcpttype 4
1943 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1944 * op1=0x00000002000000020000000100000001
1945 */
1946FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1947{
1948 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1949 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1950 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1951 {
1952 /*
1953 * Register, register.
1954 */
1955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1956 IEM_MC_BEGIN(2, 0);
1957 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1958 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1959
1960 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1961 IEM_MC_PREPARE_SSE_USAGE();
1962
1963 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1964 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1965 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1966
1967 IEM_MC_ADVANCE_RIP();
1968 IEM_MC_END();
1969 }
1970 else
1971 {
1972 /*
1973 * Register, memory.
1974 */
1975 IEM_MC_BEGIN(2, 2);
1976 IEM_MC_LOCAL(RTUINT128U, uSrc);
1977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1978 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1979 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1980
1981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1983 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1984 IEM_MC_PREPARE_SSE_USAGE();
1985
1986 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1987 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1988 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1989
1990 IEM_MC_ADVANCE_RIP();
1991 IEM_MC_END();
1992 }
1993 return VINF_SUCCESS;
1994}
1995
1996/**
1997 * @opdone
1998 * @opmnemonic udf30f16
1999 * @opcode 0x16
2000 * @oppfx 0xf2
2001 * @opunused intel-modrm
2002 * @opcpuid sse
2003 * @optest ->
2004 * @opdone
2005 */
2006
2007
2008/**
2009 * @opcode 0x17
2010 * @opcodesub !11 mr/reg
2011 * @oppfx none
2012 * @opcpuid sse
2013 * @opgroup og_sse_simdfp_datamove
2014 * @opxcpttype 5
2015 * @optest op1=1 op2=2 -> op1=2
2016 * @optest op1=0 op2=-42 -> op1=-42
2017 */
2018FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2019{
2020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2021 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2022 {
2023 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2024
2025 IEM_MC_BEGIN(0, 2);
2026 IEM_MC_LOCAL(uint64_t, uSrc);
2027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2028
2029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2031 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2032 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2033
2034 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2035 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2036
2037 IEM_MC_ADVANCE_RIP();
2038 IEM_MC_END();
2039 return VINF_SUCCESS;
2040 }
2041
2042 /**
2043 * @opdone
2044 * @opmnemonic ud0f17m3
2045 * @opcode 0x17
2046 * @opcodesub 11 mr/reg
2047 * @oppfx none
2048 * @opunused immediate
2049 * @opcpuid sse
2050 * @optest ->
2051 */
2052 return IEMOP_RAISE_INVALID_OPCODE();
2053}
2054
2055
2056/**
2057 * @opcode 0x17
2058 * @opcodesub !11 mr/reg
2059 * @oppfx 0x66
2060 * @opcpuid sse2
2061 * @opgroup og_sse2_pcksclr_datamove
2062 * @opxcpttype 5
2063 * @optest op1=1 op2=2 -> op1=2
2064 * @optest op1=0 op2=-42 -> op1=-42
2065 */
2066FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2067{
2068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2069 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2070 {
2071 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2072
2073 IEM_MC_BEGIN(0, 2);
2074 IEM_MC_LOCAL(uint64_t, uSrc);
2075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2076
2077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2079 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2080 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2081
2082 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2083 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2084
2085 IEM_MC_ADVANCE_RIP();
2086 IEM_MC_END();
2087 return VINF_SUCCESS;
2088 }
2089
2090 /**
2091 * @opdone
2092 * @opmnemonic ud660f17m3
2093 * @opcode 0x17
2094 * @opcodesub 11 mr/reg
2095 * @oppfx 0x66
2096 * @opunused immediate
2097 * @opcpuid sse
2098 * @optest ->
2099 */
2100 return IEMOP_RAISE_INVALID_OPCODE();
2101}
2102
2103
2104/**
2105 * @opdone
2106 * @opmnemonic udf30f17
2107 * @opcode 0x17
2108 * @oppfx 0xf3
2109 * @opunused intel-modrm
2110 * @opcpuid sse
2111 * @optest ->
2112 * @opdone
2113 */
2114
2115/**
2116 * @opmnemonic udf20f17
2117 * @opcode 0x17
2118 * @oppfx 0xf2
2119 * @opunused intel-modrm
2120 * @opcpuid sse
2121 * @optest ->
2122 * @opdone
2123 */
2124
2125
2126/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The reg field selects the prefetch hint; only the mnemonic/stats
           name differs per hint since the emulation below is a NOP anyway. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg field is 3 bits, all values covered */
        }

        /* Decode the effective address (for exceptions/stats) but don't touch memory. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register operands are invalid for prefetch. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2158
2159
2160/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /* Multi-byte NOP (hintable NOP range 0x0f 0x19..0x1f). Both register and
       memory forms are no-ops; the memory form still decodes the effective
       address so prefixes and ModR/M bytes are consumed correctly. */
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2185
2186
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* Read a control register into a general register.
       The effective operand size is forced to 64-bit in long mode and 32-bit
       otherwise; only CR0/2/3/4 (and CR8 via LOCK on capable CPUs) exist. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    /* Privilege and mode checks are done by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
2218
2219
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* Read a debug register into a general register.
       REX.R is invalid here (#UD) - there are no DR8..DR15. */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
2233
2234
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* Write a general register into a control register.
       Mirror image of iemOp_mov_Rd_Cd above: forced 64/32-bit operand size,
       LOCK-prefix CR8 encoding, and the same CR0/2/3/4/8 validity check. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
2266
2267
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* Write a general register into a debug register.
       REX.R is invalid (#UD), same as the read direction above. */
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
2281
2282
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* Test-register read - only existed on 386/486; #UD on emulated CPUs. */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2291
2292
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* Test-register write - only existed on 386/486; #UD on emulated CPUs. */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2301
2302
2303/**
2304 * @opcode 0x28
2305 * @oppfx none
2306 * @opcpuid sse
2307 * @opgroup og_sse_simdfp_datamove
2308 * @opxcpttype 1
2309 * @optest op1=1 op2=2 -> op1=2
2310 * @optest op1=0 op2=-42 -> op1=-42
2311 */
2312FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2313{
2314 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2316 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2317 {
2318 /*
2319 * Register, register.
2320 */
2321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2322 IEM_MC_BEGIN(0, 0);
2323 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2324 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2325 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2326 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2327 IEM_MC_ADVANCE_RIP();
2328 IEM_MC_END();
2329 }
2330 else
2331 {
2332 /*
2333 * Register, memory.
2334 */
2335 IEM_MC_BEGIN(0, 2);
2336 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2338
2339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2341 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2342 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2343
2344 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2345 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2346
2347 IEM_MC_ADVANCE_RIP();
2348 IEM_MC_END();
2349 }
2350 return VINF_SUCCESS;
2351}
2352
2353/**
2354 * @opcode 0x28
2355 * @oppfx 66
2356 * @opcpuid sse2
2357 * @opgroup og_sse2_pcksclr_datamove
2358 * @opxcpttype 1
2359 * @optest op1=1 op2=2 -> op1=2
2360 * @optest op1=0 op2=-42 -> op1=-42
2361 */
2362FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2363{
2364 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2366 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2367 {
2368 /*
2369 * Register, register.
2370 */
2371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2372 IEM_MC_BEGIN(0, 0);
2373 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2374 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2375 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2376 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2377 IEM_MC_ADVANCE_RIP();
2378 IEM_MC_END();
2379 }
2380 else
2381 {
2382 /*
2383 * Register, memory.
2384 */
2385 IEM_MC_BEGIN(0, 2);
2386 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2388
2389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2391 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2392 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2393
2394 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2395 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2396
2397 IEM_MC_ADVANCE_RIP();
2398 IEM_MC_END();
2399 }
2400 return VINF_SUCCESS;
2401}
2402
2403/* Opcode 0xf3 0x0f 0x28 - invalid */
2404/* Opcode 0xf2 0x0f 0x28 - invalid */
2405
2406/**
2407 * @opcode 0x29
2408 * @oppfx none
2409 * @opcpuid sse
2410 * @opgroup og_sse_simdfp_datamove
2411 * @opxcpttype 1
2412 * @optest op1=1 op2=2 -> op1=2
2413 * @optest op1=0 op2=-42 -> op1=-42
2414 */
2415FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2416{
2417 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2419 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2420 {
2421 /*
2422 * Register, register.
2423 */
2424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2425 IEM_MC_BEGIN(0, 0);
2426 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2427 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2428 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2429 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2430 IEM_MC_ADVANCE_RIP();
2431 IEM_MC_END();
2432 }
2433 else
2434 {
2435 /*
2436 * Memory, register.
2437 */
2438 IEM_MC_BEGIN(0, 2);
2439 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2441
2442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2444 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2446
2447 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2448 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2449
2450 IEM_MC_ADVANCE_RIP();
2451 IEM_MC_END();
2452 }
2453 return VINF_SUCCESS;
2454}
2455
2456/**
2457 * @opcode 0x29
2458 * @oppfx 66
2459 * @opcpuid sse2
2460 * @opgroup og_sse2_pcksclr_datamove
2461 * @opxcpttype 1
2462 * @optest op1=1 op2=2 -> op1=2
2463 * @optest op1=0 op2=-42 -> op1=-42
2464 */
2465FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2466{
2467 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2469 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2470 {
2471 /*
2472 * Register, register.
2473 */
2474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2475 IEM_MC_BEGIN(0, 0);
2476 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2478 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2479 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2480 IEM_MC_ADVANCE_RIP();
2481 IEM_MC_END();
2482 }
2483 else
2484 {
2485 /*
2486 * Memory, register.
2487 */
2488 IEM_MC_BEGIN(0, 2);
2489 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2491
2492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2494 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2495 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2496
2497 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2498 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2499
2500 IEM_MC_ADVANCE_RIP();
2501 IEM_MC_END();
2502 }
2503 return VINF_SUCCESS;
2504}
2505
2506/* Opcode 0xf3 0x0f 0x29 - invalid */
2507/* Opcode 0xf2 0x0f 0x29 - invalid */
2508
2509
/* 0x0f 0x2a conversion forms - decode stubs, not yet implemented
   ("//NEXT" marks them as queued for implementation - presumably). */
/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2518
2519
2520/**
2521 * @opcode 0x2b
2522 * @opcodesub !11 mr/reg
2523 * @oppfx none
2524 * @opcpuid sse
2525 * @opgroup og_sse1_cachect
2526 * @opxcpttype 1
2527 * @optest op1=1 op2=2 -> op1=2
2528 * @optest op1=0 op2=-42 -> op1=-42
2529 */
2530FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2531{
2532 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2534 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2535 {
2536 /*
2537 * memory, register.
2538 */
2539 IEM_MC_BEGIN(0, 2);
2540 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2542
2543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2545 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2546 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2547
2548 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2549 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2550
2551 IEM_MC_ADVANCE_RIP();
2552 IEM_MC_END();
2553 }
2554 /* The register, register encoding is invalid. */
2555 else
2556 return IEMOP_RAISE_INVALID_OPCODE();
2557 return VINF_SUCCESS;
2558}
2559
2560/**
2561 * @opcode 0x2b
2562 * @opcodesub !11 mr/reg
2563 * @oppfx 0x66
2564 * @opcpuid sse2
2565 * @opgroup og_sse2_cachect
2566 * @opxcpttype 1
2567 * @optest op1=1 op2=2 -> op1=2
2568 * @optest op1=0 op2=-42 -> op1=-42
2569 */
2570FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2571{
2572 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2574 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2575 {
2576 /*
2577 * memory, register.
2578 */
2579 IEM_MC_BEGIN(0, 2);
2580 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2582
2583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2585 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2587
2588 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2589 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2590
2591 IEM_MC_ADVANCE_RIP();
2592 IEM_MC_END();
2593 }
2594 /* The register, register encoding is invalid. */
2595 else
2596 return IEMOP_RAISE_INVALID_OPCODE();
2597 return VINF_SUCCESS;
2598}
2599/* Opcode 0xf3 0x0f 0x2b - invalid */
2600/* Opcode 0xf2 0x0f 0x2b - invalid */
2601
2602
/* 0x0f 0x2c..0x2f: conversion and compare-ordered forms - decode stubs,
   not yet implemented ("// NEXT" = queued for implementation - presumably). */
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */
2634
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* Write model-specific register; all the work happens in the C impl. */
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
2642
2643
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* Read time-stamp counter; deferred to the C implementation. */
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
2651
2652
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* Read model-specific register (RDMSR is 0F 32, not 0F 33 - the old
       comment was wrong); deferred to the C implementation. */
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2660
2661
/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    /* Read performance-monitoring counter (RDPMC is 0F 33; 0F 34 is
       SYSENTER, stubbed below - the old comment was wrong). */
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}
2669
2670
/* 0x0f 0x36 is undefined; the others below are not implemented yet. */
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
2677
2678
/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
    /* Three-byte escape: dispatch via the 0f38 table, indexed by the third
       opcode byte times four plus the (66/f3/f2/none) prefix index. */
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2690
2691
/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
    /* Three-byte escape: dispatch via the 0f3a table, same 4-way prefix
       indexing scheme as the 0f38 escape above. */
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2703
2704
2705/**
2706 * Implements a conditional move.
2707 *
2708 * Wish there was an obvious way to do this where we could share and reduce
2709 * code bloat.
2710 *
2711 * @param a_Cnd The conditional "microcode" operation.
2712 */
2713#define CMOV_X(a_Cnd) \
2714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2715 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2716 { \
2717 switch (pVCpu->iem.s.enmEffOpSize) \
2718 { \
2719 case IEMMODE_16BIT: \
2720 IEM_MC_BEGIN(0, 1); \
2721 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2722 a_Cnd { \
2723 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2724 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2725 } IEM_MC_ENDIF(); \
2726 IEM_MC_ADVANCE_RIP(); \
2727 IEM_MC_END(); \
2728 return VINF_SUCCESS; \
2729 \
2730 case IEMMODE_32BIT: \
2731 IEM_MC_BEGIN(0, 1); \
2732 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2733 a_Cnd { \
2734 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2735 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2736 } IEM_MC_ELSE() { \
2737 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2738 } IEM_MC_ENDIF(); \
2739 IEM_MC_ADVANCE_RIP(); \
2740 IEM_MC_END(); \
2741 return VINF_SUCCESS; \
2742 \
2743 case IEMMODE_64BIT: \
2744 IEM_MC_BEGIN(0, 1); \
2745 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2746 a_Cnd { \
2747 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2748 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2749 } IEM_MC_ENDIF(); \
2750 IEM_MC_ADVANCE_RIP(); \
2751 IEM_MC_END(); \
2752 return VINF_SUCCESS; \
2753 \
2754 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2755 } \
2756 } \
2757 else \
2758 { \
2759 switch (pVCpu->iem.s.enmEffOpSize) \
2760 { \
2761 case IEMMODE_16BIT: \
2762 IEM_MC_BEGIN(0, 2); \
2763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2764 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2766 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2767 a_Cnd { \
2768 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2769 } IEM_MC_ENDIF(); \
2770 IEM_MC_ADVANCE_RIP(); \
2771 IEM_MC_END(); \
2772 return VINF_SUCCESS; \
2773 \
2774 case IEMMODE_32BIT: \
2775 IEM_MC_BEGIN(0, 2); \
2776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2777 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2779 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2780 a_Cnd { \
2781 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2782 } IEM_MC_ELSE() { \
2783 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2784 } IEM_MC_ENDIF(); \
2785 IEM_MC_ADVANCE_RIP(); \
2786 IEM_MC_END(); \
2787 return VINF_SUCCESS; \
2788 \
2789 case IEMMODE_64BIT: \
2790 IEM_MC_BEGIN(0, 2); \
2791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2792 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2794 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2795 a_Cnd { \
2796 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2797 } IEM_MC_ENDIF(); \
2798 IEM_MC_ADVANCE_RIP(); \
2799 IEM_MC_END(); \
2800 return VINF_SUCCESS; \
2801 \
2802 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2803 } \
2804 } do {} while (0)
2805
2806
2807
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); /* move when OF=1 (overflow) */
}
2814
2815
/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); /* move when OF=0 (no overflow) */
}
2822
2823
/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); /* move when CF=1 (carry/below) */
}
2830
2831
/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); /* move when CF=0 (no carry / above-or-equal) */
}
2838
2839
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); /* move when ZF=1 (equal/zero) */
}
2846
2847
/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); /* move when ZF=0 (not equal) */
}
2854
2855
/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move when CF=1 or ZF=1 (below-or-equal) */
}
2862
2863
/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move when CF=0 and ZF=0 (above) */
}
2870
2871
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); /* move when SF=1 (sign/negative) */
}
2878
2879
/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); /* move when SF=0 (not sign) */
}
2886
2887
/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); /* move when PF=1 (parity even) */
}
2894
2895
/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); /* move when PF=0 (parity odd) */
}
2902
2903
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* move when SF != OF (signed less) */
}
2910
2911
/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* move when SF == OF (signed greater-or-equal) */
}
2918
2919
/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move when ZF=1 or SF != OF (signed less-or-equal) */
}
2926
2927
/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move when ZF=0 and SF == OF (signed greater) */
}
2934
2935#undef CMOV_X
2936
/* 0x0f 0x50..0x5f: SSE/SSE2 arithmetic, logic, and conversion forms -
   decode stubs, not yet implemented. */
/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);

/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_STUB(iemOp_andps_Vps_Wps);
/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_STUB(iemOp_orps_Vps_Wps);
/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_STUB(iemOp_addps_Vps_Wps);
/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_STUB(iemOp_addss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);

/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);

/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_STUB(iemOp_subps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_STUB(iemOp_subss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);

/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_STUB(iemOp_minps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_STUB(iemOp_minss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);

/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_STUB(iemOp_divps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_STUB(iemOp_divss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);

/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3065
3066/**
3067 * Common worker for MMX instructions on the forms:
3068 * pxxxx mm1, mm2/mem32
3069 *
3070 * The 2nd operand is the first half of a register, which in the memory case
3071 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3072 * memory accessed for MMX.
3073 *
3074 * Exceptions type 4.
3075 */
3076FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3077{
3078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3080 {
3081 /*
3082 * Register, register.
3083 */
3084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3085 IEM_MC_BEGIN(2, 0);
3086 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3087 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3088 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3089 IEM_MC_PREPARE_SSE_USAGE();
3090 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3091 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3092 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3093 IEM_MC_ADVANCE_RIP();
3094 IEM_MC_END();
3095 }
3096 else
3097 {
3098 /*
3099 * Register, memory.
3100 */
3101 IEM_MC_BEGIN(2, 2);
3102 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3103 IEM_MC_LOCAL(uint64_t, uSrc);
3104 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3106
3107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3109 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3110 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3111
3112 IEM_MC_PREPARE_SSE_USAGE();
3113 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3114 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3115
3116 IEM_MC_ADVANCE_RIP();
3117 IEM_MC_END();
3118 }
3119 return VINF_SUCCESS;
3120}
3121
3122
3123/**
3124 * Common worker for SSE2 instructions on the forms:
3125 * pxxxx xmm1, xmm2/mem128
3126 *
3127 * The 2nd operand is the first half of a register, which in the memory case
3128 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3129 * memory accessed for MMX.
3130 *
3131 * Exceptions type 4.
3132 */
3133FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3134{
3135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3136 if (!pImpl->pfnU64)
3137 return IEMOP_RAISE_INVALID_OPCODE();
3138 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3139 {
3140 /*
3141 * Register, register.
3142 */
3143 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3144 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3146 IEM_MC_BEGIN(2, 0);
3147 IEM_MC_ARG(uint64_t *, pDst, 0);
3148 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3149 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3150 IEM_MC_PREPARE_FPU_USAGE();
3151 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3152 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3153 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3154 IEM_MC_ADVANCE_RIP();
3155 IEM_MC_END();
3156 }
3157 else
3158 {
3159 /*
3160 * Register, memory.
3161 */
3162 IEM_MC_BEGIN(2, 2);
3163 IEM_MC_ARG(uint64_t *, pDst, 0);
3164 IEM_MC_LOCAL(uint32_t, uSrc);
3165 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3167
3168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3170 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3171 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3172
3173 IEM_MC_PREPARE_FPU_USAGE();
3174 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3175 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3176
3177 IEM_MC_ADVANCE_RIP();
3178 IEM_MC_END();
3179 }
3180 return VINF_SUCCESS;
3181}
3182
3183
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    /* Interleave low-order bytes; the common MMX low-low worker does decode and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
3190
3191/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
3192FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3193{
3194 IEMOP_MNEMONIC(vpunpcklbw_Vx_Wx, "vpunpcklbw Vx, Wx");
3195 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3196}
3197
3198/* Opcode 0xf3 0x0f 0x60 - invalid */
3199
3200
/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* Interleave low-order words via the common MMX low-low worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
3207
3208/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3209FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3210{
3211 IEMOP_MNEMONIC(vpunpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3212 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3213}
3214
3215/* Opcode 0xf3 0x0f 0x61 - invalid */
3216
3217
/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    /* Interleave low-order dwords via the common MMX low-low worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
3224
/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
    /* SSE2 form; the common SSE low-low worker does decode and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
3231
3232/* Opcode 0xf3 0x0f 0x62 - invalid */
3233
3234
3235
/* Not yet implemented: pack/compare instruction stubs (FNIEMOP_STUB emits a
   "not implemented" placeholder decoder). */

/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
FNIEMOP_STUB(iemOp_packuswb_Vx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */
3265
3266
3267/**
3268 * Common worker for MMX instructions on the form:
3269 * pxxxx mm1, mm2/mem64
3270 *
3271 * The 2nd operand is the second half of a register, which in the memory case
3272 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3273 * where it may read the full 128 bits or only the upper 64 bits.
3274 *
3275 * Exceptions type 4.
3276 */
3277FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3278{
3279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3280 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3281 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3282 {
3283 /*
3284 * Register, register.
3285 */
3286 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3287 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3289 IEM_MC_BEGIN(2, 0);
3290 IEM_MC_ARG(uint64_t *, pDst, 0);
3291 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3292 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3293 IEM_MC_PREPARE_FPU_USAGE();
3294 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3295 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3296 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3297 IEM_MC_ADVANCE_RIP();
3298 IEM_MC_END();
3299 }
3300 else
3301 {
3302 /*
3303 * Register, memory.
3304 */
3305 IEM_MC_BEGIN(2, 2);
3306 IEM_MC_ARG(uint64_t *, pDst, 0);
3307 IEM_MC_LOCAL(uint64_t, uSrc);
3308 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3310
3311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3313 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3314 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3315
3316 IEM_MC_PREPARE_FPU_USAGE();
3317 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3318 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3319
3320 IEM_MC_ADVANCE_RIP();
3321 IEM_MC_END();
3322 }
3323 return VINF_SUCCESS;
3324}
3325
3326
3327/**
3328 * Common worker for SSE2 instructions on the form:
3329 * pxxxx xmm1, xmm2/mem128
3330 *
3331 * The 2nd operand is the second half of a register, which in the memory case
3332 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3333 * where it may read the full 128 bits or only the upper 64 bits.
3334 *
3335 * Exceptions type 4.
3336 */
3337FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3338{
3339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3340 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3341 {
3342 /*
3343 * Register, register.
3344 */
3345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3346 IEM_MC_BEGIN(2, 0);
3347 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3348 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3349 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3350 IEM_MC_PREPARE_SSE_USAGE();
3351 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3352 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3353 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3354 IEM_MC_ADVANCE_RIP();
3355 IEM_MC_END();
3356 }
3357 else
3358 {
3359 /*
3360 * Register, memory.
3361 */
3362 IEM_MC_BEGIN(2, 2);
3363 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3364 IEM_MC_LOCAL(RTUINT128U, uSrc);
3365 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3367
3368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3370 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3371 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
3372
3373 IEM_MC_PREPARE_SSE_USAGE();
3374 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3375 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3376
3377 IEM_MC_ADVANCE_RIP();
3378 IEM_MC_END();
3379 }
3380 return VINF_SUCCESS;
3381}
3382
3383
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    /* Interleave high-order bytes via the common MMX high-high worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
3390
3391/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3392FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3393{
3394 IEMOP_MNEMONIC(vpunpckhbw_Vx_Wx, "vpunpckhbw Vx, Wx");
3395 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3396}
3397/* Opcode 0xf3 0x0f 0x68 - invalid */
3398
3399
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    /* Interleave high-order words via the common MMX high-high worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
3406
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx (two-operand SSE2 form; the
 *  three-operand Vx, Hx, Wx encoding is the VEX variant, handled elsewhere). */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);

}
3414/* Opcode 0xf3 0x0f 0x69 - invalid */
3415
3416
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    /* Interleave high-order dwords via the common MMX high-high worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
3423
3424/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3425FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3426{
3427 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3428 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3429}
3430/* Opcode 0xf3 0x0f 0x6a - invalid */
3431
3432
/* Not yet implemented: packssdw stubs. */

/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x6b - invalid */
3438
3439
3440/* Opcode 0x0f 0x6c - invalid */
3441
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
    /* Interleave low qwords; SSE2-only opcode (no MMX form), hence the SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
3448
3449/* Opcode 0xf3 0x0f 0x6c - invalid */
3450/* Opcode 0xf2 0x0f 0x6c - invalid */
3451
3452
3453/* Opcode 0x0f 0x6d - invalid */
3454
3455/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3456FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3457{
3458 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3459 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3460}
3461
3462/* Opcode 0xf3 0x0f 0x6d - invalid */
3463
3464
/** Opcode 0x0f 0x6e - movd/movq Pd/Pq, Ed/Eq; REX.W selects the 64-bit form. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x6e
         * @opcodesub rex.w=1
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            /* Writing an MMX register switches the FPU into MMX mode (ftw=0xff per @optest). */
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x6e
         * @opcodesub rex.w=0
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Pd_Ey
         * @optest op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MMX, greg */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            /* 32-bit source zero-extended to the full 64-bit MMX register. */
            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
3572
/** Opcode 0x66 0x0f 0x6e - movd/movq Vd/Vq, Ed/Eq; REX.W selects the 64-bit form. */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x6e
         * @opcodesub rex.w=1
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            /* Low qword written, high qword of the XMM register zeroed (VqZx). */
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x6e
         * @opcodesub rex.w=0
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Vy_Ey
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* XMM, greg32 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            /* Low dword written, remainder of the XMM register zeroed (VdZx). */
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
3676
3677/* Opcode 0xf3 0x0f 0x6e - invalid */
3678
3679
3680/**
3681 * @opcode 0x6f
3682 * @oppfx none
3683 * @opcpuid mmx
3684 * @opgroup og_mmx_datamove
3685 * @opxcpttype 5
3686 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3687 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3688 */
3689FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3690{
3691 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3693 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3694 {
3695 /*
3696 * Register, register.
3697 */
3698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3699 IEM_MC_BEGIN(0, 1);
3700 IEM_MC_LOCAL(uint64_t, u64Tmp);
3701
3702 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3703 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3704
3705 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3706 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3707 IEM_MC_FPU_TO_MMX_MODE();
3708
3709 IEM_MC_ADVANCE_RIP();
3710 IEM_MC_END();
3711 }
3712 else
3713 {
3714 /*
3715 * Register, memory.
3716 */
3717 IEM_MC_BEGIN(0, 2);
3718 IEM_MC_LOCAL(uint64_t, u64Tmp);
3719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3720
3721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3723 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3724 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3725
3726 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3727 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3728 IEM_MC_FPU_TO_MMX_MODE();
3729
3730 IEM_MC_ADVANCE_RIP();
3731 IEM_MC_END();
3732 }
3733 return VINF_SUCCESS;
3734}
3735
3736/**
3737 * @opcode 0x6f
3738 * @oppfx 0x66
3739 * @opcpuid sse2
3740 * @opgroup og_sse2_simdint_datamove
3741 * @opxcpttype 1
3742 * @optest op1=1 op2=2 -> op1=2
3743 * @optest op1=0 op2=-42 -> op1=-42
3744 */
3745FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3746{
3747 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3749 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3750 {
3751 /*
3752 * Register, register.
3753 */
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755 IEM_MC_BEGIN(0, 0);
3756
3757 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3758 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3759
3760 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3761 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3762 IEM_MC_ADVANCE_RIP();
3763 IEM_MC_END();
3764 }
3765 else
3766 {
3767 /*
3768 * Register, memory.
3769 */
3770 IEM_MC_BEGIN(0, 2);
3771 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3773
3774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3777 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3778
3779 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3780 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3781
3782 IEM_MC_ADVANCE_RIP();
3783 IEM_MC_END();
3784 }
3785 return VINF_SUCCESS;
3786}
3787
3788/**
3789 * @opcode 0x6f
3790 * @oppfx 0xf3
3791 * @opcpuid sse2
3792 * @opgroup og_sse2_simdint_datamove
3793 * @opxcpttype 4UA
3794 * @optest op1=1 op2=2 -> op1=2
3795 * @optest op1=0 op2=-42 -> op1=-42
3796 */
3797FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3798{
3799 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3801 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3802 {
3803 /*
3804 * Register, register.
3805 */
3806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3807 IEM_MC_BEGIN(0, 0);
3808 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3809 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3810 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3811 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3812 IEM_MC_ADVANCE_RIP();
3813 IEM_MC_END();
3814 }
3815 else
3816 {
3817 /*
3818 * Register, memory.
3819 */
3820 IEM_MC_BEGIN(0, 2);
3821 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3823
3824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3826 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3827 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3828 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3829 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3830
3831 IEM_MC_ADVANCE_RIP();
3832 IEM_MC_END();
3833 }
3834 return VINF_SUCCESS;
3835}
3836
3837
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /* bEvil is the immediate word-shuffle selector. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT(); /* pshufw requires SSE or AMD MMX extensions */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate follows the ModR/M bytes, so it is fetched after the
           effective address calculation. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3890
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /* bEvil is the immediate dword-shuffle selector. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Immediate follows the ModR/M bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3943
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /* bEvil is the immediate selector for the high four words. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Immediate follows the ModR/M bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3996
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /* bEvil is the immediate selector for the low four words. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Immediate follows the ModR/M bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4049
4050
/* Group 12 (0x0f 0x71) immediate-shift stubs; only the register forms exist. */

/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4068
4069
4070/**
4071 * Group 12 jump table for register variant.
4072 */
4073IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4074{
4075 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4076 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4077 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4078 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4079 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4080 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4081 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4082 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4083};
4084AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4085
4086
/** Opcode 0x0f 0x71. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register: dispatch on /reg and the operand prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory encodings of group 12 are all invalid (imm8 still consumed). */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
4097
4098
/* Group 13 (0x0f 0x72) immediate-shift stubs; only the register forms exist. */

/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4116
4117
4118/**
4119 * Group 13 jump table for register variant.
4120 */
4121IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4122{
4123 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4124 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4125 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4126 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4127 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4128 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4129 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4130 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4131};
4132AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4133
4134/** Opcode 0x0f 0x72. */
4135FNIEMOP_DEF(iemOp_Grp13)
4136{
4137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4138 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4139 /* register, register */
4140 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4141 + pVCpu->iem.s.idxPrefix], bRm);
4142 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4143}
4144
4145
/* Group 14 (0x0f 0x73) register-form workers: quadword/double-quadword shifts
   by immediate on MMX (Nq) and SSE (Ux) registers.  All still stubs. */

/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4163
4164/**
4165 * Group 14 jump table for register variant.
4166 */
4167IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4168{
4169 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4170 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4171 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4172 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4173 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4174 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4175 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4176 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4177};
4178AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4179
4180
4181/** Opcode 0x0f 0x73. */
4182FNIEMOP_DEF(iemOp_Grp14)
4183{
4184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4185 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4186 /* register, register */
4187 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4188 + pVCpu->iem.s.idxPrefix], bRm);
4189 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4190}
4191
4192
4193/**
4194 * Common worker for MMX instructions on the form:
4195 * pxxx mm1, mm2/mem64
4196 */
4197FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4198{
4199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4200 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4201 {
4202 /*
4203 * Register, register.
4204 */
4205 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4206 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4208 IEM_MC_BEGIN(2, 0);
4209 IEM_MC_ARG(uint64_t *, pDst, 0);
4210 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4211 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4212 IEM_MC_PREPARE_FPU_USAGE();
4213 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4214 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4215 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4216 IEM_MC_ADVANCE_RIP();
4217 IEM_MC_END();
4218 }
4219 else
4220 {
4221 /*
4222 * Register, memory.
4223 */
4224 IEM_MC_BEGIN(2, 2);
4225 IEM_MC_ARG(uint64_t *, pDst, 0);
4226 IEM_MC_LOCAL(uint64_t, uSrc);
4227 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4229
4230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4232 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4233 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4234
4235 IEM_MC_PREPARE_FPU_USAGE();
4236 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4237 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4238
4239 IEM_MC_ADVANCE_RIP();
4240 IEM_MC_END();
4241 }
4242 return VINF_SUCCESS;
4243}
4244
4245
4246/**
4247 * Common worker for SSE2 instructions on the forms:
4248 * pxxx xmm1, xmm2/mem128
4249 *
4250 * Proper alignment of the 128-bit operand is enforced.
4251 * Exceptions type 4. SSE2 cpuid checks.
4252 */
4253FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4254{
4255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4256 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4257 {
4258 /*
4259 * Register, register.
4260 */
4261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4262 IEM_MC_BEGIN(2, 0);
4263 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4264 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4265 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4266 IEM_MC_PREPARE_SSE_USAGE();
4267 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4268 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4269 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4270 IEM_MC_ADVANCE_RIP();
4271 IEM_MC_END();
4272 }
4273 else
4274 {
4275 /*
4276 * Register, memory.
4277 */
4278 IEM_MC_BEGIN(2, 2);
4279 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4280 IEM_MC_LOCAL(RTUINT128U, uSrc);
4281 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4283
4284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4286 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4287 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4288
4289 IEM_MC_PREPARE_SSE_USAGE();
4290 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4291 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4292
4293 IEM_MC_ADVANCE_RIP();
4294 IEM_MC_END();
4295 }
4296 return VINF_SUCCESS;
4297}
4298
4299
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    /* MMX byte-compare-for-equality; delegates decoding to the common MMX worker. */
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
4306
4307/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4308FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4309{
4310 IEMOP_MNEMONIC(vpcmpeqb_Vx_Wx, "pcmpeqb");
4311 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4312}
4313
4314/* Opcode 0xf3 0x0f 0x74 - invalid */
4315/* Opcode 0xf2 0x0f 0x74 - invalid */
4316
4317
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    /* MMX word-compare-for-equality; delegates decoding to the common MMX worker. */
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
4324
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    /* SSE2 word-compare-for-equality; delegates decoding to the common SSE2 worker. */
    IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
4331
4332/* Opcode 0xf3 0x0f 0x75 - invalid */
4333/* Opcode 0xf2 0x0f 0x75 - invalid */
4334
4335
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    /* MMX dword-compare-for-equality; delegates decoding to the common MMX worker. */
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
4342
4343/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4344FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4345{
4346 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "vpcmpeqd");
4347 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4348}
4349
4350/* Opcode 0xf3 0x0f 0x76 - invalid */
4351/* Opcode 0xf2 0x0f 0x76 - invalid */
4352
4353
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    /* Empty MMX state: marks all FPU tag-word entries empty so x87 code can
       run again after MMX usage. */
    IEMOP_MNEMONIC(emms, "emms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();  /* #NM if CR0.TS/EM. */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();              /* #MF on pending x87 exception. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();                 /* The actual FTW reset. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4369
4370/* Opcode 0x66 0x0f 0x77 - invalid */
4371/* Opcode 0xf3 0x0f 0x77 - invalid */
4372/* Opcode 0xf2 0x0f 0x77 - invalid */
4373
/* VMX instruction stubs (not yet implemented). */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */
4386
4387/* Opcode 0x0f 0x7a - invalid */
4388/* Opcode 0x66 0x0f 0x7a - invalid */
4389/* Opcode 0xf3 0x0f 0x7a - invalid */
4390/* Opcode 0xf2 0x0f 0x7a - invalid */
4391
4392/* Opcode 0x0f 0x7b - invalid */
4393/* Opcode 0x66 0x0f 0x7b - invalid */
4394/* Opcode 0xf3 0x0f 0x7b - invalid */
4395/* Opcode 0xf2 0x0f 0x7b - invalid */
4396
/* SSE3 horizontal add/subtract stubs (not yet implemented). */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_STUB(iemOp_haddps_Vps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4410
4411
/** Opcode 0x0f 0x7e - movd_q Ey, Pd
 *
 * Stores an MMX register to a GPR or memory: 64 bits with REX.W, 32 bits
 * otherwise.  Both paths enter MMX mode (FTW all valid) afterwards.
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* greg64, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            /* FOR_CHANGE: the FPU_TO_MMX_MODE below modifies the FPU state (FTW). */
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Pd
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* greg32, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
4521
4522
/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy
 *
 * Stores the low 64 (REX.W) or 32 bits of an XMM register to a GPR or memory.
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* greg64, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            /* Only reads the XMM register, hence FOR_READ. */
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Vy
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* greg32, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
4627
4628/**
4629 * @opcode 0x7e
4630 * @oppfx 0xf3
4631 * @opcpuid sse2
4632 * @opgroup og_sse2_pcksclr_datamove
4633 * @opxcpttype none
4634 * @optest op1=1 op2=2 -> op1=2
4635 * @optest op1=0 op2=-42 -> op1=-42
4636 */
4637FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4638{
4639 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4641 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4642 {
4643 /*
4644 * Register, register.
4645 */
4646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4647 IEM_MC_BEGIN(0, 2);
4648 IEM_MC_LOCAL(uint64_t, uSrc);
4649
4650 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4651 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4652
4653 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4654 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4655
4656 IEM_MC_ADVANCE_RIP();
4657 IEM_MC_END();
4658 }
4659 else
4660 {
4661 /*
4662 * Memory, register.
4663 */
4664 IEM_MC_BEGIN(0, 2);
4665 IEM_MC_LOCAL(uint64_t, uSrc);
4666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4667
4668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4670 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4671 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4672
4673 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4674 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4675
4676 IEM_MC_ADVANCE_RIP();
4677 IEM_MC_END();
4678 }
4679 return VINF_SUCCESS;
4680}
4681
4682/* Opcode 0xf2 0x0f 0x7e - invalid */
4683
4684
/** Opcode 0x0f 0x7f - movq Qq, Pq
 *
 * Stores an MMX register to another MMX register or to memory.
 * NOTE(review): unlike the 0x0f 0x7e paths above this does not call
 * IEM_MC_FPU_TO_MMX_MODE -- confirm whether that is intentional.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* FOR_CHANGE: the destination MMX register is written. */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* FOR_READ: only the source MMX register is read; memory is the destination. */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4729
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx
 *
 * Aligned 128-bit store from an XMM register to another XMM register or memory.
 */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is r/m, source is /reg (store direction). */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        /* Aligned store: faults on a misaligned 128-bit destination (contrast movdqu). */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4771
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx
 *
 * Unaligned 128-bit store from an XMM register to another XMM register or memory.
 */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is r/m, source is /reg (store direction). */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        /* Plain (unaligned) store -- no alignment fault, unlike movdqa. */
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4813
4814/* Opcode 0xf2 0x0f 0x7f - invalid */
4815
4816
4817
4818/** Opcode 0x0f 0x80. */
4819FNIEMOP_DEF(iemOp_jo_Jv)
4820{
4821 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4822 IEMOP_HLP_MIN_386();
4823 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4824 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4825 {
4826 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4828
4829 IEM_MC_BEGIN(0, 0);
4830 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4831 IEM_MC_REL_JMP_S16(i16Imm);
4832 } IEM_MC_ELSE() {
4833 IEM_MC_ADVANCE_RIP();
4834 } IEM_MC_ENDIF();
4835 IEM_MC_END();
4836 }
4837 else
4838 {
4839 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4841
4842 IEM_MC_BEGIN(0, 0);
4843 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4844 IEM_MC_REL_JMP_S32(i32Imm);
4845 } IEM_MC_ELSE() {
4846 IEM_MC_ADVANCE_RIP();
4847 } IEM_MC_ENDIF();
4848 IEM_MC_END();
4849 }
4850 return VINF_SUCCESS;
4851}
4852
4853
4854/** Opcode 0x0f 0x81. */
4855FNIEMOP_DEF(iemOp_jno_Jv)
4856{
4857 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4858 IEMOP_HLP_MIN_386();
4859 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4860 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4861 {
4862 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4864
4865 IEM_MC_BEGIN(0, 0);
4866 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4867 IEM_MC_ADVANCE_RIP();
4868 } IEM_MC_ELSE() {
4869 IEM_MC_REL_JMP_S16(i16Imm);
4870 } IEM_MC_ENDIF();
4871 IEM_MC_END();
4872 }
4873 else
4874 {
4875 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4877
4878 IEM_MC_BEGIN(0, 0);
4879 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4880 IEM_MC_ADVANCE_RIP();
4881 } IEM_MC_ELSE() {
4882 IEM_MC_REL_JMP_S32(i32Imm);
4883 } IEM_MC_ENDIF();
4884 IEM_MC_END();
4885 }
4886 return VINF_SUCCESS;
4887}
4888
4889
4890/** Opcode 0x0f 0x82. */
4891FNIEMOP_DEF(iemOp_jc_Jv)
4892{
4893 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4894 IEMOP_HLP_MIN_386();
4895 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4896 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4897 {
4898 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4900
4901 IEM_MC_BEGIN(0, 0);
4902 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4903 IEM_MC_REL_JMP_S16(i16Imm);
4904 } IEM_MC_ELSE() {
4905 IEM_MC_ADVANCE_RIP();
4906 } IEM_MC_ENDIF();
4907 IEM_MC_END();
4908 }
4909 else
4910 {
4911 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4913
4914 IEM_MC_BEGIN(0, 0);
4915 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4916 IEM_MC_REL_JMP_S32(i32Imm);
4917 } IEM_MC_ELSE() {
4918 IEM_MC_ADVANCE_RIP();
4919 } IEM_MC_ENDIF();
4920 IEM_MC_END();
4921 }
4922 return VINF_SUCCESS;
4923}
4924
4925
4926/** Opcode 0x0f 0x83. */
4927FNIEMOP_DEF(iemOp_jnc_Jv)
4928{
4929 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4930 IEMOP_HLP_MIN_386();
4931 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4932 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4933 {
4934 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4936
4937 IEM_MC_BEGIN(0, 0);
4938 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4939 IEM_MC_ADVANCE_RIP();
4940 } IEM_MC_ELSE() {
4941 IEM_MC_REL_JMP_S16(i16Imm);
4942 } IEM_MC_ENDIF();
4943 IEM_MC_END();
4944 }
4945 else
4946 {
4947 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4949
4950 IEM_MC_BEGIN(0, 0);
4951 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4952 IEM_MC_ADVANCE_RIP();
4953 } IEM_MC_ELSE() {
4954 IEM_MC_REL_JMP_S32(i32Imm);
4955 } IEM_MC_ENDIF();
4956 IEM_MC_END();
4957 }
4958 return VINF_SUCCESS;
4959}
4960
4961
4962/** Opcode 0x0f 0x84. */
4963FNIEMOP_DEF(iemOp_je_Jv)
4964{
4965 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4966 IEMOP_HLP_MIN_386();
4967 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4968 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4969 {
4970 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4972
4973 IEM_MC_BEGIN(0, 0);
4974 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4975 IEM_MC_REL_JMP_S16(i16Imm);
4976 } IEM_MC_ELSE() {
4977 IEM_MC_ADVANCE_RIP();
4978 } IEM_MC_ENDIF();
4979 IEM_MC_END();
4980 }
4981 else
4982 {
4983 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4985
4986 IEM_MC_BEGIN(0, 0);
4987 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4988 IEM_MC_REL_JMP_S32(i32Imm);
4989 } IEM_MC_ELSE() {
4990 IEM_MC_ADVANCE_RIP();
4991 } IEM_MC_ENDIF();
4992 IEM_MC_END();
4993 }
4994 return VINF_SUCCESS;
4995}
4996
4997
4998/** Opcode 0x0f 0x85. */
4999FNIEMOP_DEF(iemOp_jne_Jv)
5000{
5001 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5002 IEMOP_HLP_MIN_386();
5003 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5004 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5005 {
5006 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5008
5009 IEM_MC_BEGIN(0, 0);
5010 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5011 IEM_MC_ADVANCE_RIP();
5012 } IEM_MC_ELSE() {
5013 IEM_MC_REL_JMP_S16(i16Imm);
5014 } IEM_MC_ENDIF();
5015 IEM_MC_END();
5016 }
5017 else
5018 {
5019 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5021
5022 IEM_MC_BEGIN(0, 0);
5023 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5024 IEM_MC_ADVANCE_RIP();
5025 } IEM_MC_ELSE() {
5026 IEM_MC_REL_JMP_S32(i32Imm);
5027 } IEM_MC_ENDIF();
5028 IEM_MC_END();
5029 }
5030 return VINF_SUCCESS;
5031}
5032
5033
5034/** Opcode 0x0f 0x86. */
5035FNIEMOP_DEF(iemOp_jbe_Jv)
5036{
5037 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5038 IEMOP_HLP_MIN_386();
5039 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5040 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5041 {
5042 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5044
5045 IEM_MC_BEGIN(0, 0);
5046 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5047 IEM_MC_REL_JMP_S16(i16Imm);
5048 } IEM_MC_ELSE() {
5049 IEM_MC_ADVANCE_RIP();
5050 } IEM_MC_ENDIF();
5051 IEM_MC_END();
5052 }
5053 else
5054 {
5055 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5057
5058 IEM_MC_BEGIN(0, 0);
5059 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5060 IEM_MC_REL_JMP_S32(i32Imm);
5061 } IEM_MC_ELSE() {
5062 IEM_MC_ADVANCE_RIP();
5063 } IEM_MC_ENDIF();
5064 IEM_MC_END();
5065 }
5066 return VINF_SUCCESS;
5067}
5068
5069
5070/** Opcode 0x0f 0x87. */
5071FNIEMOP_DEF(iemOp_jnbe_Jv)
5072{
5073 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5074 IEMOP_HLP_MIN_386();
5075 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5076 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5077 {
5078 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5080
5081 IEM_MC_BEGIN(0, 0);
5082 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5083 IEM_MC_ADVANCE_RIP();
5084 } IEM_MC_ELSE() {
5085 IEM_MC_REL_JMP_S16(i16Imm);
5086 } IEM_MC_ENDIF();
5087 IEM_MC_END();
5088 }
5089 else
5090 {
5091 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5093
5094 IEM_MC_BEGIN(0, 0);
5095 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5096 IEM_MC_ADVANCE_RIP();
5097 } IEM_MC_ELSE() {
5098 IEM_MC_REL_JMP_S32(i32Imm);
5099 } IEM_MC_ENDIF();
5100 IEM_MC_END();
5101 }
5102 return VINF_SUCCESS;
5103}
5104
5105
5106/** Opcode 0x0f 0x88. */
5107FNIEMOP_DEF(iemOp_js_Jv)
5108{
5109 IEMOP_MNEMONIC(js_Jv, "js Jv");
5110 IEMOP_HLP_MIN_386();
5111 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5112 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5113 {
5114 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5116
5117 IEM_MC_BEGIN(0, 0);
5118 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5119 IEM_MC_REL_JMP_S16(i16Imm);
5120 } IEM_MC_ELSE() {
5121 IEM_MC_ADVANCE_RIP();
5122 } IEM_MC_ENDIF();
5123 IEM_MC_END();
5124 }
5125 else
5126 {
5127 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5129
5130 IEM_MC_BEGIN(0, 0);
5131 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5132 IEM_MC_REL_JMP_S32(i32Imm);
5133 } IEM_MC_ELSE() {
5134 IEM_MC_ADVANCE_RIP();
5135 } IEM_MC_ENDIF();
5136 IEM_MC_END();
5137 }
5138 return VINF_SUCCESS;
5139}
5140
5141
5142/** Opcode 0x0f 0x89. */
5143FNIEMOP_DEF(iemOp_jns_Jv)
5144{
5145 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5146 IEMOP_HLP_MIN_386();
5147 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5148 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5149 {
5150 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5152
5153 IEM_MC_BEGIN(0, 0);
5154 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5155 IEM_MC_ADVANCE_RIP();
5156 } IEM_MC_ELSE() {
5157 IEM_MC_REL_JMP_S16(i16Imm);
5158 } IEM_MC_ENDIF();
5159 IEM_MC_END();
5160 }
5161 else
5162 {
5163 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165
5166 IEM_MC_BEGIN(0, 0);
5167 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5168 IEM_MC_ADVANCE_RIP();
5169 } IEM_MC_ELSE() {
5170 IEM_MC_REL_JMP_S32(i32Imm);
5171 } IEM_MC_ENDIF();
5172 IEM_MC_END();
5173 }
5174 return VINF_SUCCESS;
5175}
5176
5177
/** Opcode 0x0f 0x8a - jp/jpe Jv: near relative jump taken when PF=1. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();               /* The two-byte Jcc forms require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Near branches default to 64-bit operand size in long mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: sign-extended 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: sign-extended 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5212
5213
/** Opcode 0x0f 0x8b - jnp/jpo Jv: near relative jump taken when PF=0. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();               /* The two-byte Jcc forms require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Near branches default to 64-bit operand size in long mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: sign-extended 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when PF is set, jump when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: sign-extended 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5248
5249
/** Opcode 0x0f 0x8c - jl/jnge Jv: near relative jump taken when SF != OF. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();               /* The two-byte Jcc forms require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Near branches default to 64-bit operand size in long mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: sign-extended 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: sign-extended 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5284
5285
/** Opcode 0x0f 0x8d - jnl/jge Jv: near relative jump taken when SF == OF. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();               /* The two-byte Jcc forms require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Near branches default to 64-bit operand size in long mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: sign-extended 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when SF != OF, jump when SF == OF. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: sign-extended 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5320
5321
/** Opcode 0x0f 0x8e - jle/jng Jv: near relative jump taken when ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();               /* The two-byte Jcc forms require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Near branches default to 64-bit operand size in long mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: sign-extended 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: sign-extended 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5356
5357
/** Opcode 0x0f 0x8f - jnle/jg Jv: near relative jump taken when ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();               /* The two-byte Jcc forms require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Near branches default to 64-bit operand size in long mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: sign-extended 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: fall through when ZF=1 or SF != OF, jump otherwise. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: sign-extended 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5392
5393
/** Opcode 0x0f 0x90 - seto Eb: set the byte operand to 1 if OF=1, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5434
5435
/** Opcode 0x0f 0x91 - setno Eb: set the byte operand to 1 if OF=0, else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so the stored constants are swapped. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5476
5477
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: set the byte operand to 1 if CF=1, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5518
5519
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: set the byte operand to 1 if CF=0, else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so the stored constants are swapped. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5560
5561
/** Opcode 0x0f 0x94 - sete/setz Eb: set the byte operand to 1 if ZF=1, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5602
5603
/** Opcode 0x0f 0x95 - setne/setnz Eb: set the byte operand to 1 if ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so the stored constants are swapped. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5644
5645
/** Opcode 0x0f 0x96 - setbe/setna Eb: set the byte operand to 1 if CF=1 or ZF=1, else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5686
5687
/** Opcode 0x0f 0x97 - setnbe/seta Eb: set the byte operand to 1 if CF=0 and ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so the stored constants are swapped. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5728
5729
/** Opcode 0x0f 0x98 - sets Eb: set the byte operand to 1 if SF=1, else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5770
5771
/** Opcode 0x0f 0x99 - setns Eb: set the byte operand to 1 if SF=0, else 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so the stored constants are swapped. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5812
5813
/** Opcode 0x0f 0x9a - setp/setpe Eb: set the byte operand to 1 if PF=1, else 0. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5854
5855
/** Opcode 0x0f 0x9b - setnp/setpo Eb: set the byte operand to 1 if PF=0, else 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so the stored constants are swapped. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5896
5897
/** Opcode 0x0f 0x9c - setl/setnge Eb: set the byte operand to 1 if SF != OF, else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5938
5939
/** Opcode 0x0f 0x9d - setnl/setge Eb: set the byte operand to 1 if SF == OF, else 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so the stored constants are swapped. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5980
5981
/** Opcode 0x0f 0x9e - setle/setng Eb: set the byte operand to 1 if ZF=1 or SF != OF, else 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6022
6023
/** Opcode 0x0f 0x9f - setnle/setg Eb: set the byte operand to 1 if ZF=0 and SF == OF, else 0. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so the stored constants are swapped. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6064
6065
6066/**
6067 * Common 'push segment-register' helper.
6068 */
6069FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6070{
6071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6072 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
6073 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6074
6075 switch (pVCpu->iem.s.enmEffOpSize)
6076 {
6077 case IEMMODE_16BIT:
6078 IEM_MC_BEGIN(0, 1);
6079 IEM_MC_LOCAL(uint16_t, u16Value);
6080 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6081 IEM_MC_PUSH_U16(u16Value);
6082 IEM_MC_ADVANCE_RIP();
6083 IEM_MC_END();
6084 break;
6085
6086 case IEMMODE_32BIT:
6087 IEM_MC_BEGIN(0, 1);
6088 IEM_MC_LOCAL(uint32_t, u32Value);
6089 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6090 IEM_MC_PUSH_U32_SREG(u32Value);
6091 IEM_MC_ADVANCE_RIP();
6092 IEM_MC_END();
6093 break;
6094
6095 case IEMMODE_64BIT:
6096 IEM_MC_BEGIN(0, 1);
6097 IEM_MC_LOCAL(uint64_t, u64Value);
6098 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6099 IEM_MC_PUSH_U64(u64Value);
6100 IEM_MC_ADVANCE_RIP();
6101 IEM_MC_END();
6102 break;
6103 }
6104
6105 return VINF_SUCCESS;
6106}
6107
6108
/** Opcode 0x0f 0xa0 - push fs. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    /* Note: iemOpCommonPushSReg performs this no-lock-prefix check again; redundant but harmless. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
6117
6118
/** Opcode 0x0f 0xa1 - pop fs; deferred to the C implementation worker. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
6127
6128
/** Opcode 0x0f 0xa2 - cpuid; deferred to the C implementation worker. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
6137
6138
6139/**
6140 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6141 * iemOp_bts_Ev_Gv.
6142 */
6143FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6144{
6145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6146 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6147
6148 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6149 {
6150 /* register destination. */
6151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6152 switch (pVCpu->iem.s.enmEffOpSize)
6153 {
6154 case IEMMODE_16BIT:
6155 IEM_MC_BEGIN(3, 0);
6156 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6157 IEM_MC_ARG(uint16_t, u16Src, 1);
6158 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6159
6160 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6161 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6162 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6163 IEM_MC_REF_EFLAGS(pEFlags);
6164 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6165
6166 IEM_MC_ADVANCE_RIP();
6167 IEM_MC_END();
6168 return VINF_SUCCESS;
6169
6170 case IEMMODE_32BIT:
6171 IEM_MC_BEGIN(3, 0);
6172 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6173 IEM_MC_ARG(uint32_t, u32Src, 1);
6174 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6175
6176 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6177 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6178 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6179 IEM_MC_REF_EFLAGS(pEFlags);
6180 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6181
6182 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6183 IEM_MC_ADVANCE_RIP();
6184 IEM_MC_END();
6185 return VINF_SUCCESS;
6186
6187 case IEMMODE_64BIT:
6188 IEM_MC_BEGIN(3, 0);
6189 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6190 IEM_MC_ARG(uint64_t, u64Src, 1);
6191 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6192
6193 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6194 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6195 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6196 IEM_MC_REF_EFLAGS(pEFlags);
6197 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6198
6199 IEM_MC_ADVANCE_RIP();
6200 IEM_MC_END();
6201 return VINF_SUCCESS;
6202
6203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6204 }
6205 }
6206 else
6207 {
6208 /* memory destination. */
6209
6210 uint32_t fAccess;
6211 if (pImpl->pfnLockedU16)
6212 fAccess = IEM_ACCESS_DATA_RW;
6213 else /* BT */
6214 fAccess = IEM_ACCESS_DATA_R;
6215
6216 /** @todo test negative bit offsets! */
6217 switch (pVCpu->iem.s.enmEffOpSize)
6218 {
6219 case IEMMODE_16BIT:
6220 IEM_MC_BEGIN(3, 2);
6221 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6222 IEM_MC_ARG(uint16_t, u16Src, 1);
6223 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6225 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6226
6227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6228 if (pImpl->pfnLockedU16)
6229 IEMOP_HLP_DONE_DECODING();
6230 else
6231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6232 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6233 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6234 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6235 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6236 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6237 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6238 IEM_MC_FETCH_EFLAGS(EFlags);
6239
6240 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6241 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6242 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6243 else
6244 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6245 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6246
6247 IEM_MC_COMMIT_EFLAGS(EFlags);
6248 IEM_MC_ADVANCE_RIP();
6249 IEM_MC_END();
6250 return VINF_SUCCESS;
6251
6252 case IEMMODE_32BIT:
6253 IEM_MC_BEGIN(3, 2);
6254 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6255 IEM_MC_ARG(uint32_t, u32Src, 1);
6256 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6258 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6259
6260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6261 if (pImpl->pfnLockedU16)
6262 IEMOP_HLP_DONE_DECODING();
6263 else
6264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6265 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6266 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6267 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6268 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6269 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6270 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6271 IEM_MC_FETCH_EFLAGS(EFlags);
6272
6273 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6274 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6275 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6276 else
6277 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6278 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6279
6280 IEM_MC_COMMIT_EFLAGS(EFlags);
6281 IEM_MC_ADVANCE_RIP();
6282 IEM_MC_END();
6283 return VINF_SUCCESS;
6284
6285 case IEMMODE_64BIT:
6286 IEM_MC_BEGIN(3, 2);
6287 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6288 IEM_MC_ARG(uint64_t, u64Src, 1);
6289 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6291 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6292
6293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6294 if (pImpl->pfnLockedU16)
6295 IEMOP_HLP_DONE_DECODING();
6296 else
6297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6298 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6299 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6300 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6301 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6302 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6303 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6304 IEM_MC_FETCH_EFLAGS(EFlags);
6305
6306 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6307 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6308 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6309 else
6310 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6311 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6312
6313 IEM_MC_COMMIT_EFLAGS(EFlags);
6314 IEM_MC_ADVANCE_RIP();
6315 IEM_MC_END();
6316 return VINF_SUCCESS;
6317
6318 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6319 }
6320 }
6321}
6322
6323
/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    /* BT Ev,Gv - bit test. Dispatches to the common bit-op worker with the
       'bt' implementation table; bt has no locked variant, so the worker
       maps memory operands read-only. */
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();    /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
6331
6332
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes the mod r/m byte plus the imm8 shift count and performs the
 * double-precision shift through the implementation table given by @a pImpl
 * (shld or shrd).  AF and OF are declared undefined for verification,
 * matching real hardware behaviour for these instructions.
 *
 * @param   pImpl   Size-indexed worker function table (normal variants only;
 *                  these instructions take no lock prefix on the register
 *                  form and the memory form below decodes without lock too).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the imm8 follows the mod r/m byte directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination.  Note: the trailing 1 in IEM_MC_CALC_RM_EFF_ADDR
           accounts for the imm8 byte that still follows the addressing bytes
           (contrast with the CL variant which passes 0). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6477
6478
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Like iemOpCommonShldShrd_Ib, but the shift count comes from the CL
 * register instead of an immediate byte.  AF and OF are declared undefined
 * for verification purposes.
 *
 * @param   pImpl   Size-indexed worker function table (shld or shrd).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint8_t,         cShiftArg,  2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint8_t,         cShiftArg,  2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint8_t,         cShiftArg,  2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination; no immediate follows, hence cbImm = 0 to
           IEM_MC_CALC_RM_EFF_ADDR. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,            0);
                IEM_MC_ARG(uint16_t,                u16Src,             1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,          2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,            0);
                IEM_MC_ARG(uint32_t,                u32Src,             1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,          2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,            0);
                IEM_MC_ARG(uint64_t,                u64Src,             1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,          2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6622
6623
6624
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* SHLD Ev,Gv,Ib - double precision shift left, imm8 count. */
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();    /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
6632
6633
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* SHLD Ev,Gv,CL - double precision shift left, count in CL. */
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();    /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
6641
6642
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* PUSH GS - push the GS segment selector; shares the common segment
       register push worker. */
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();    /* Instruction introduced with the 386. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
6651
6652
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS - deferred to the C implementation since segment register
       loads involve descriptor checks. */
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();    /* Instruction introduced with the 386. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
6661
6662
/** Opcode 0x0f 0xaa. */
FNIEMOP_DEF(iemOp_rsm)
{
    /* RSM - resume from system management mode.  Only the SVM nested-guest
       intercept is handled; the regular instruction is still a stub and
       raises \#UD. */
    IEMOP_MNEMONIC(rsm, "rsm");
    IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
    /** @todo rsm - for the regular case (above handles only the SVM nested-guest
     *        intercept). */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
6673
6674//IEMOP_HLP_MIN_386();
6675
6676
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS Ev,Gv - bit test and set; uses the common bit-op worker with the
       'bts' table (has a locked variant, so memory operands map read-write). */
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();    /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
6684
6685
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD Ev,Gv,Ib - double precision shift right, imm8 count. */
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();    /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
6693
6694
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD Ev,Gv,CL - double precision shift right, count in CL. */
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();    /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
6702
6703
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /* FXSAVE m512 - save x87/MMX/SSE state to memory.  Raises #UD when the
       guest CPU profile lacks FXSR.  State is only read, hence the
       FOR_READ actualization. */
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6723
6724
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /* FXRSTOR m512 - restore x87/MMX/SSE state from memory.  Raises #UD
       when the guest CPU profile lacks FXSR.  State is modified, hence the
       FOR_CHANGE actualization. */
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6744
6745
6746/**
6747 * @opmaps grp15
6748 * @opcode !11/2
6749 * @oppfx none
6750 * @opcpuid sse
6751 * @opgroup og_sse_mxcsrsm
6752 * @opxcpttype 5
6753 * @optest op1=0 -> mxcsr=0
6754 * @optest op1=0x2083 -> mxcsr=0x2083
6755 * @optest op1=0xfffffffe -> value.xcpt=0xd
6756 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6757 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6758 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6759 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6760 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6761 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6762 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6763 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6764 */
6765FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6766{
6767 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6768 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6769 return IEMOP_RAISE_INVALID_OPCODE();
6770
6771 IEM_MC_BEGIN(2, 0);
6772 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6773 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6776 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6777 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6778 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6779 IEM_MC_END();
6780 return VINF_SUCCESS;
6781}
6782
6783
/**
 * @opmaps      grp15
 * @opcode      !11/3
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_mxcsrsm
 * @opxcpttype  5
 * @optest      mxcsr=0      -> op1=0
 * @optest      mxcsr=0x2083 -> op1=0x2083
 * @optest      mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest      mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest      mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest      mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest      mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    /* STMXCSR m32 - store MXCSR to memory.  Raises #UD when the guest CPU
       profile lacks SSE.  MXCSR is only read, so FOR_READ actualization
       suffices here. */
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6819
6820
/**
 * @opmaps      grp15
 * @opcode      !11/4
 * @oppfx       none
 * @opcpuid     xsave
 * @opgroup     og_system
 * @opxcpttype  none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    /* XSAVE mem - save processor extended state to memory.  Raises #UD when
       the guest CPU profile lacks XSAVE.  State is read out, hence the
       FOR_READ actualization. */
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6847
6848
6849/**
6850 * @opmaps grp15
6851 * @opcode !11/5
6852 * @oppfx none
6853 * @opcpuid xsave
6854 * @opgroup og_system
6855 * @opxcpttype none
6856 */
6857FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6858{
6859 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6860 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6861 return IEMOP_RAISE_INVALID_OPCODE();
6862
6863 IEM_MC_BEGIN(3, 0);
6864 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6865 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6866 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6869 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6870 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6871 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6872 IEM_MC_END();
6873 return VINF_SUCCESS;
6874}
6875
/** Opcode 0x0f 0xae mem/6.
 * xsaveopt is not implemented yet; the stub raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6878
/**
 * @opmaps      grp15
 * @opcode      !11/7
 * @oppfx       none
 * @opcpuid     clfsh
 * @opgroup     og_cachectl
 * @optest      op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    /* CLFLUSH m8 - flush cache line.  Without the CLFLUSH feature the
       encoding decays to the generic invalid-with-modrm handling. */
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* Shared C worker handles both clflush and clflushopt. */
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6903
/**
 * @opmaps      grp15
 * @opcode      !11/7
 * @oppfx       0x66
 * @opcpuid     clflushopt
 * @opgroup     og_cachectl
 * @optest      op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    /* CLFLUSHOPT m8 - optimized cache line flush (0x66 prefixed form).
       Without the CLFLUSHOPT feature the encoding decays to the generic
       invalid-with-modrm handling. */
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* Shared C worker handles both clflush and clflushopt. */
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6928
6929
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    /* LFENCE - load fence.  Raises #UD when the guest CPU profile lacks
       SSE2.  When the host CPU has SSE2 the real lfence helper is used,
       otherwise an alternative memory-fence helper substitutes. */
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6948
6949
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /* MFENCE - full memory fence.  Raises #UD when the guest CPU profile
       lacks SSE2.  Falls back to the alternative memory-fence helper when
       the host CPU has no SSE2. */
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6968
6969
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    /* SFENCE - store fence.  Raises #UD when the guest CPU profile lacks
       SSE2 (NB: real sfence is an SSE instruction; gating on SSE2 follows
       the existing lfence/mfence pattern here).  Falls back to the
       alternative memory-fence helper when the host CPU has no SSE2. */
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6988
6989
/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    /* RDFSBASE Ry - read the FS segment base into a GPR (64- or 32-bit
       destination depending on operand size).  Feature/mode gating is
       presumably done inside IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT - confirm. */
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7017
/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    /* RDGSBASE Ry - read the GS segment base into a GPR; identical to
       rdfsbase apart from the segment register. */
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7045
/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    /* WRFSBASE Ry - write the FS segment base from a GPR.  A 64-bit source
       must be canonical (#GP(0) otherwise); the 32-bit path needs no such
       check since the value is inherently canonical. */
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);   /* zero-extended 32-bit value */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7074
/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    /* WRGSBASE Ry - write the GS segment base from a GPR; identical to
       wrfsbase apart from the segment register. */
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);   /* zero-extended 32-bit value */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7103
7104
/**
 * Group 15 jump table for the register (mod=11b) variant.
 *
 * Indexed by (reg << 2) | prefix-index: 4 entries per /reg value, one for
 * each of the no-prefix, 0x66, 0xf3 and 0xf2 operand prefixes.  Only the
 * 0xf3-prefixed rd/wr-fs/gsbase encodings and the unprefixed fences are
 * valid register forms.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdfsbase,           iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdgsbase,           iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrfsbase,           iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrgsbase,           iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7120
7121
/**
 * Group 15 jump table for the memory variant.
 *
 * Same (reg << 2) | prefix-index layout as g_apfnGroup15RegReg.  The
 * memory forms carry the save/restore and cache-control instructions;
 * /7 distinguishes clflush (no prefix) from clflushopt (0x66).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_Grp15_fxsave,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,             iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt,          iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,           iemOp_Grp15_clflushopt,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7137
7138
/**
 * Opcode 0x0f 0xae - group 15 dispatcher.
 *
 * Fetches the ModR/M byte and dispatches on (reg field * 4 + prefix index)
 * into either the register-form or memory-form jump table above.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 /* register, register */
 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
 + pVCpu->iem.s.idxPrefix], bRm);
 /* memory, register */
 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
 + pVCpu->iem.s.idxPrefix], bRm);
}
7152
7153
/**
 * Opcode 0x0f 0xaf - two-operand IMUL Gv,Ev.
 *
 * Delegates to the common rv,rm binary-operator helper with the two-operand
 * imul implementation table; SF/ZF/AF/PF are declared undefined for the
 * verifier.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
 IEMOP_HLP_MIN_386();
 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
7162
7163
/**
 * Opcode 0x0f 0xb0 - CMPXCHG Eb,Gb.
 *
 * Byte-sized compare-and-exchange against AL.  For both the register and the
 * memory form the assembly helper receives (dst, &AL, src, &eflags); the
 * locked helper variant is selected when a LOCK prefix is present.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
 IEMOP_HLP_MIN_486();
 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 {
 /* Register destination: operate directly on the guest register refs. */
 IEMOP_HLP_DONE_DECODING();
 IEM_MC_BEGIN(4, 0);
 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
 IEM_MC_ARG(uint8_t *, pu8Al, 1);
 IEM_MC_ARG(uint8_t, u8Src, 2);
 IEM_MC_ARG(uint32_t *, pEFlags, 3);

 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
 IEM_MC_REF_EFLAGS(pEFlags);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 }
 else
 {
 /*
 * Memory destination: map the byte R/W, keep AL in a local and store it
 * back unconditionally after the helper (which may have updated it).
 */
 IEM_MC_BEGIN(4, 3);
 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
 IEM_MC_ARG(uint8_t *, pu8Al, 1);
 IEM_MC_ARG(uint8_t, u8Src, 2);
 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_LOCAL(uint8_t, u8Al);

 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING();
 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
 IEM_MC_FETCH_EFLAGS(EFlags);
 IEM_MC_REF_LOCAL(pu8Al, u8Al);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
 IEM_MC_COMMIT_EFLAGS(EFlags);
 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back AL regardless of compare outcome */
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 }
 return VINF_SUCCESS;
}
7222
/**
 * Opcode 0x0f 0xb1 - CMPXCHG Ev,Gv.
 *
 * Word/dword/qword compare-and-exchange against AX/EAX/RAX, one case per
 * effective operand size for both the register and the memory form.  The
 * LOCK-prefixed path calls the locked helper variants.  On 32-bit x86 hosts
 * the 64-bit source operand is passed to the helper by reference instead of
 * by value (RT_ARCH_X86 conditionals below).
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
 IEMOP_HLP_MIN_486();
 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 {
 /* Register destination. */
 IEMOP_HLP_DONE_DECODING();
 switch (pVCpu->iem.s.enmEffOpSize)
 {
 case IEMMODE_16BIT:
 IEM_MC_BEGIN(4, 0);
 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
 IEM_MC_ARG(uint16_t, u16Src, 2);
 IEM_MC_ARG(uint32_t *, pEFlags, 3);

 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
 IEM_MC_REF_EFLAGS(pEFlags);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_32BIT:
 IEM_MC_BEGIN(4, 0);
 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
 IEM_MC_ARG(uint32_t, u32Src, 2);
 IEM_MC_ARG(uint32_t *, pEFlags, 3);

 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
 IEM_MC_REF_EFLAGS(pEFlags);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

 /* 32-bit register writes clear the upper halves of the 64-bit regs. */
 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_64BIT:
 IEM_MC_BEGIN(4, 0);
 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
 IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
 IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
 IEM_MC_ARG(uint32_t *, pEFlags, 3);

 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
 IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
 /* 32-bit host: pass the 64-bit source by reference. */
 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
 }
 else
 {
 /*
 * Memory destination: map the operand R/W, keep the accumulator in a
 * local and store it back unconditionally after the helper call.
 */
 switch (pVCpu->iem.s.enmEffOpSize)
 {
 case IEMMODE_16BIT:
 IEM_MC_BEGIN(4, 3);
 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
 IEM_MC_ARG(uint16_t, u16Src, 2);
 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_LOCAL(uint16_t, u16Ax);

 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING();
 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
 IEM_MC_FETCH_EFLAGS(EFlags);
 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
 IEM_MC_COMMIT_EFLAGS(EFlags);
 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_32BIT:
 IEM_MC_BEGIN(4, 3);
 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
 IEM_MC_ARG(uint32_t, u32Src, 2);
 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_LOCAL(uint32_t, u32Eax);

 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING();
 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
 IEM_MC_FETCH_EFLAGS(EFlags);
 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
 IEM_MC_COMMIT_EFLAGS(EFlags);
 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_64BIT:
 IEM_MC_BEGIN(4, 3);
 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
 IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
 IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_LOCAL(uint64_t, u64Rax);

 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING();
 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
 IEM_MC_FETCH_EFLAGS(EFlags);
 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
 /* 32-bit host: pass the 64-bit source by reference. */
 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
 IEM_MC_COMMIT_EFLAGS(EFlags);
 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
 }
}
7416
7417
/**
 * Common worker for LSS/LFS/LGS (far-pointer load into sreg + greg).
 *
 * Fetches the offset part (16/32/64 bits per effective operand size) followed
 * by the 16-bit selector at offset +2/+4/+8, then hands both to the
 * iemCImpl_load_SReg_Greg C implementation.  Memory operand only; the caller
 * has already rejected the register form.
 *
 * @param iSegReg The destination segment register (X86_SREG_XXX).
 * @param bRm The ModR/M byte (mod != 3 — asserted below).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

 switch (pVCpu->iem.s.enmEffOpSize)
 {
 case IEMMODE_16BIT:
 IEM_MC_BEGIN(5, 1);
 IEM_MC_ARG(uint16_t, uSel, 0);
 IEM_MC_ARG(uint16_t, offSeg, 1);
 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset */
 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_32BIT:
 IEM_MC_BEGIN(5, 1);
 IEM_MC_ARG(uint16_t, uSel, 0);
 IEM_MC_ARG(uint32_t, offSeg, 1);
 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset */
 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_64BIT:
 IEM_MC_BEGIN(5, 1);
 IEM_MC_ARG(uint16_t, uSel, 0);
 IEM_MC_ARG(uint64_t, offSeg, 1);
 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
 else
 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset */
 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
 IEM_MC_END();
 return VINF_SUCCESS;

 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
}
7479
7480
/**
 * Opcode 0x0f 0xb2 - LSS Gv,Mp.
 *
 * Register form is invalid (#UD); memory form loads SS:Gv via the common
 * far-pointer worker.
 */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
 IEMOP_HLP_MIN_386();
 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 return IEMOP_RAISE_INVALID_OPCODE();
 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
7491
7492
/**
 * Opcode 0x0f 0xb3 - BTR Ev,Gv.
 *
 * Delegates to the common bit-test worker with the bit-test-and-reset
 * implementation table.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
 IEMOP_HLP_MIN_386();
 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
7500
7501
/**
 * Opcode 0x0f 0xb4 - LFS Gv,Mp.
 *
 * Register form is invalid (#UD); memory form loads FS:Gv via the common
 * far-pointer worker.
 */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
 IEMOP_HLP_MIN_386();
 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 return IEMOP_RAISE_INVALID_OPCODE();
 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
7512
7513
/**
 * Opcode 0x0f 0xb5 - LGS Gv,Mp.
 *
 * Register form is invalid (#UD); memory form loads GS:Gv via the common
 * far-pointer worker.
 */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
 IEMOP_HLP_MIN_386();
 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 return IEMOP_RAISE_INVALID_OPCODE();
 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
7524
7525
/**
 * Opcode 0x0f 0xb6 - MOVZX Gv,Eb.
 *
 * Zero-extends a byte (register or memory) into a 16/32/64-bit destination
 * register, one case per effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
 IEMOP_HLP_MIN_386();

 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

 /*
 * If rm is denoting a register, no more instruction bytes.
 */
 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 {
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 switch (pVCpu->iem.s.enmEffOpSize)
 {
 case IEMMODE_16BIT:
 IEM_MC_BEGIN(0, 1);
 IEM_MC_LOCAL(uint16_t, u16Value);
 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_32BIT:
 IEM_MC_BEGIN(0, 1);
 IEM_MC_LOCAL(uint32_t, u32Value);
 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_64BIT:
 IEM_MC_BEGIN(0, 1);
 IEM_MC_LOCAL(uint64_t, u64Value);
 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
 }
 else
 {
 /*
 * We're loading a register from memory.
 */
 switch (pVCpu->iem.s.enmEffOpSize)
 {
 case IEMMODE_16BIT:
 IEM_MC_BEGIN(0, 2);
 IEM_MC_LOCAL(uint16_t, u16Value);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_32BIT:
 IEM_MC_BEGIN(0, 2);
 IEM_MC_LOCAL(uint32_t, u32Value);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_64BIT:
 IEM_MC_BEGIN(0, 2);
 IEM_MC_LOCAL(uint64_t, u64Value);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
 }
}
7619
7620
/**
 * Opcode 0x0f 0xb7 - MOVZX Gv,Ew.
 *
 * Zero-extends a word (register or memory) into a 32- or 64-bit destination
 * register.  Non-64-bit effective operand sizes are all handled by the
 * 32-bit path (the operand-size prefix is effectively ignored, see todo).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
 IEMOP_HLP_MIN_386();

 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

 /** @todo Not entirely sure how the operand size prefix is handled here,
 * assuming that it will be ignored. Would be nice to have a few
 * test for this. */
 /*
 * If rm is denoting a register, no more instruction bytes.
 */
 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 {
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
 {
 IEM_MC_BEGIN(0, 1);
 IEM_MC_LOCAL(uint32_t, u32Value);
 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 }
 else
 {
 IEM_MC_BEGIN(0, 1);
 IEM_MC_LOCAL(uint64_t, u64Value);
 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 }
 }
 else
 {
 /*
 * We're loading a register from memory.
 */
 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
 {
 IEM_MC_BEGIN(0, 2);
 IEM_MC_LOCAL(uint32_t, u32Value);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 }
 else
 {
 IEM_MC_BEGIN(0, 2);
 IEM_MC_LOCAL(uint64_t, u64Value);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 }
 }
 return VINF_SUCCESS;
}
7689
7690
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF); raises #UD. */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev (not yet implemented). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7695
7696
7697/**
7698 * @opcode 0xb9
7699 * @opinvalid intel-modrm
7700 * @optest ->
7701 */
7702FNIEMOP_DEF(iemOp_Grp10)
7703{
7704 /*
7705 * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
7706 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7707 */
7708 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7709 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7710 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7711}
7712
7713
/**
 * Opcode 0x0f 0xba - group 8: BT/BTS/BTR/BTC Ev,Ib.
 *
 * /0../3 are invalid (with full ModR/M + imm8 decoding on both vendors);
 * /4../7 select the implementation table.  The imm8 bit offset is masked to
 * the operand width (0x0f/0x1f/0x3f).  For the memory form, BT (which has no
 * locked variant, pfnLockedU16 == NULL) maps the operand read-only and
 * rejects LOCK; the others map read-write.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
 IEMOP_HLP_MIN_386();
 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 PCIEMOPBINSIZES pImpl;
 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
 {
 case 0: case 1: case 2: case 3:
 /* Both AMD and Intel want full modr/m decoding and imm8. */
 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 {
 /* register destination. */
 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

 switch (pVCpu->iem.s.enmEffOpSize)
 {
 case IEMMODE_16BIT:
 IEM_MC_BEGIN(3, 0);
 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
 IEM_MC_ARG(uint32_t *, pEFlags, 2);

 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_REF_EFLAGS(pEFlags);
 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_32BIT:
 IEM_MC_BEGIN(3, 0);
 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
 IEM_MC_ARG(uint32_t *, pEFlags, 2);

 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_REF_EFLAGS(pEFlags);
 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit write zeroes the upper half */
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_64BIT:
 IEM_MC_BEGIN(3, 0);
 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
 IEM_MC_ARG(uint32_t *, pEFlags, 2);

 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_REF_EFLAGS(pEFlags);
 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
 }
 else
 {
 /* memory destination. */

 /* BT has no locked form (pfnLockedU16 is NULL) and only reads. */
 uint32_t fAccess;
 if (pImpl->pfnLockedU16)
 fAccess = IEM_ACCESS_DATA_RW;
 else /* BT */
 fAccess = IEM_ACCESS_DATA_R;

 /** @todo test negative bit offsets! */
 switch (pVCpu->iem.s.enmEffOpSize)
 {
 case IEMMODE_16BIT:
 IEM_MC_BEGIN(3, 1);
 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
 IEM_MC_ARG(uint16_t, u16Src, 1);
 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* cbImm=1: the imm8 follows the ModR/M bytes */
 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
 if (pImpl->pfnLockedU16)
 IEMOP_HLP_DONE_DECODING();
 else
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_EFLAGS(EFlags);
 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

 IEM_MC_COMMIT_EFLAGS(EFlags);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_32BIT:
 IEM_MC_BEGIN(3, 1);
 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
 IEM_MC_ARG(uint32_t, u32Src, 1);
 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* cbImm=1: the imm8 follows the ModR/M bytes */
 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
 if (pImpl->pfnLockedU16)
 IEMOP_HLP_DONE_DECODING();
 else
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_EFLAGS(EFlags);
 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

 IEM_MC_COMMIT_EFLAGS(EFlags);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_64BIT:
 IEM_MC_BEGIN(3, 1);
 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
 IEM_MC_ARG(uint64_t, u64Src, 1);
 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* cbImm=1: the imm8 follows the ModR/M bytes */
 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
 if (pImpl->pfnLockedU16)
 IEMOP_HLP_DONE_DECODING();
 else
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_EFLAGS(EFlags);
 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
 else
 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

 IEM_MC_COMMIT_EFLAGS(EFlags);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
 }
}
7885
7886
/**
 * Opcode 0x0f 0xbb - BTC Ev,Gv.
 *
 * Delegates to the common bit-test worker with the bit-test-and-complement
 * implementation table.
 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
 IEMOP_HLP_MIN_386();
 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
7894
7895
/**
 * Opcode 0x0f 0xbc - BSF Gv,Ev.
 *
 * Delegates to the common rv,rm binary-operator helper; all flags except ZF
 * are declared undefined for the verifier.
 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
 IEMOP_HLP_MIN_386();
 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
7904
7905
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev (not yet implemented). */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7908
7909
/**
 * Opcode 0x0f 0xbd - BSR Gv,Ev.
 *
 * Delegates to the common rv,rm binary-operator helper; all flags except ZF
 * are declared undefined for the verifier.
 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
 IEMOP_HLP_MIN_386();
 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
7918
7919
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev (not yet implemented). */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7922
7923
/**
 * Opcode 0x0f 0xbe - MOVSX Gv,Eb.
 *
 * Sign-extends a byte (register or memory) into a 16/32/64-bit destination
 * register, one case per effective operand size.  Mirrors iemOp_movzx_Gv_Eb
 * with the _SX_ fetch variants.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
 IEMOP_HLP_MIN_386();

 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

 /*
 * If rm is denoting a register, no more instruction bytes.
 */
 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
 {
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 switch (pVCpu->iem.s.enmEffOpSize)
 {
 case IEMMODE_16BIT:
 IEM_MC_BEGIN(0, 1);
 IEM_MC_LOCAL(uint16_t, u16Value);
 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_32BIT:
 IEM_MC_BEGIN(0, 1);
 IEM_MC_LOCAL(uint32_t, u32Value);
 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_64BIT:
 IEM_MC_BEGIN(0, 1);
 IEM_MC_LOCAL(uint64_t, u64Value);
 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
 }
 else
 {
 /*
 * We're loading a register from memory.
 */
 switch (pVCpu->iem.s.enmEffOpSize)
 {
 case IEMMODE_16BIT:
 IEM_MC_BEGIN(0, 2);
 IEM_MC_LOCAL(uint16_t, u16Value);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_32BIT:
 IEM_MC_BEGIN(0, 2);
 IEM_MC_LOCAL(uint32_t, u32Value);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 case IEMMODE_64BIT:
 IEM_MC_BEGIN(0, 2);
 IEM_MC_LOCAL(uint64_t, u64Value);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
 IEM_MC_ADVANCE_RIP();
 IEM_MC_END();
 return VINF_SUCCESS;

 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 }
 }
}
8017
8018
/** Opcode 0x0f 0xbf - movsx Gv,Ew (sign-extend 16-bit source to Gv). */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* 16/32-bit operand sizes both produce a 32-bit result here (the
           U32 store zero-extends into the full 64-bit GREG). */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* 64-bit destination: sign-extend all the way to 64 bits. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
8087
8088
8089/** Opcode 0x0f 0xc0. */
8090FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8091{
8092 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8093 IEMOP_HLP_MIN_486();
8094 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8095
8096 /*
8097 * If rm is denoting a register, no more instruction bytes.
8098 */
8099 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8100 {
8101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8102
8103 IEM_MC_BEGIN(3, 0);
8104 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8105 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8106 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8107
8108 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8109 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8110 IEM_MC_REF_EFLAGS(pEFlags);
8111 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8112
8113 IEM_MC_ADVANCE_RIP();
8114 IEM_MC_END();
8115 }
8116 else
8117 {
8118 /*
8119 * We're accessing memory.
8120 */
8121 IEM_MC_BEGIN(3, 3);
8122 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8123 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8124 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8125 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8127
8128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8129 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8130 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8131 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8132 IEM_MC_FETCH_EFLAGS(EFlags);
8133 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8134 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8135 else
8136 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8137
8138 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8139 IEM_MC_COMMIT_EFLAGS(EFlags);
8140 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8141 IEM_MC_ADVANCE_RIP();
8142 IEM_MC_END();
8143 return VINF_SUCCESS;
8144 }
8145 return VINF_SUCCESS;
8146}
8147
8148
/** Opcode 0x0f 0xc1 - xadd Ev,Gv (16/32/64-bit exchange-and-add, 486+). */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes clear the high dword of both 64-bit GREGs. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  Same pattern as xadd Eb,Gb: map the
         * destination R/W, reference a local copy of the register operand,
         * then store the old destination value into the register.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,         0);
                IEM_MC_ARG(uint16_t *, pu16Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,         0);
                IEM_MC_ARG(uint32_t *, pu32Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,         0);
                IEM_MC_ARG(uint64_t *, pu64Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8301
8302
/* 0x0f 0xc2 SSE/SSE2 compare forms - declared as stubs, not implemented yet. */
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8311
8312
/** Opcode 0x0f 0xc3 - movnti My,Gy (non-temporal store of a GPR to memory).
 * Emulated here as a plain store; the non-temporal hint has no architectural
 * effect on the result. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* #UD when SSE2 is absent; checked after decoding completes. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/* 0x0f 0xc4..0xc6: pinsrw/pextrw/shufps families - stubs, not implemented yet. */
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */
8390
8391
/** Opcode 0x0f 0xc7 !11/1 - cmpxchg8b Mq.
 * Compares EDX:EAX with the 64-bit memory operand; on match stores ECX:EBX
 * and sets ZF, otherwise loads the memory value into EDX:EAX and clears ZF. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand from the 32-bit register halves. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the ECX:EBX replacement value the same way. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the worker left the memory value in the
       comparand locals; write them back to EAX/EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8436
8437
/** Opcode REX.W 0x0f 0xc7 !11/1 - cmpxchg16b Mdq.
 * 128-bit compare-and-exchange using RDX:RAX / RCX:RBX; requires the CX16
 * CPUID feature and a 16-byte aligned operand (else \#GP(0)). */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst,     0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx,     1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx,     2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        /* Architectural requirement: 16-byte alignment or #GP(0). */
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        /* Fast path: use the host's own cmpxchg16b when available. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                     accesses and not all all atomic, which works fine on in UNI CPU guest
                     configuration (ignoring DMA). If guest SMP is active we have no choice
                     but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
8511
8512FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8513{
8514 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8515 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8516 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8517}
8518
/* Remaining group 9 encodings (rdrand/rdseed and the VMX instructions) are
   UD stubs for now - presumably decoded as invalid opcode; confirm against
   the FNIEMOP_UD_STUB_1 definition. */
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8536
8537
/**
 * Group 9 jump table for register variant.
 *
 * Indexed by (/reg * 4) + prefix column, where the columns are the four
 * mandatory-prefix variants: none, 0x66, 0xf3, 0xf2.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_rdrand_Rv,          iemOp_Grp9_rdrand_Rv,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_rdseed_Rv,          iemOp_Grp9_rdseed_Rv,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8553
8554
/**
 * Group 9 jump table for memory variant.
 *
 * Same indexing scheme as g_apfnGroup9RegReg: (/reg * 4) + prefix column
 * (none, 0x66, 0xf3, 0xf2).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ iemOp_Grp9_cmpxchg8bOr16b,     iemOp_Grp9_cmpxchg8bOr16b,      iemOp_Grp9_cmpxchg8bOr16b,      iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_vmptrld_Mq,         iemOp_Grp9_vmclear_Mq,          iemOp_Grp9_vmxon_Mq,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_vmptrst_Mq,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8570
8571
8572/** Opcode 0x0f 0xc7. */
8573FNIEMOP_DEF(iemOp_Grp9)
8574{
8575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8576 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8577 /* register, register */
8578 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8579 + pVCpu->iem.s.idxPrefix], bRm);
8580 /* memory, register */
8581 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8582 + pVCpu->iem.s.idxPrefix], bRm);
8583}
8584
8585
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the given general register in place according to the effective
 * operand size.  @a iReg is the full register index (REX.B already folded in
 * by the callers).
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit bswap - note the U32 reference so the high dword of a
               64-bit register is left untouched. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8625
8626
/** Opcode 0x0f 0xc8 - bswap rAX/r8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
8637
8638
/** Opcode 0x0f 0xc9 - bswap rCX/r9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
8646
8647
8648/** Opcode 0x0f 0xca. */
8649FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8650{
8651 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
8652 IEMOP_HLP_MIN_486();
8653 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8654}
8655
8656
8657/** Opcode 0x0f 0xcb. */
8658FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8659{
8660 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
8661 IEMOP_HLP_MIN_486();
8662 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8663}
8664
8665
/** Opcode 0x0f 0xcc - bswap rSP/r12. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8673
8674
/** Opcode 0x0f 0xcd - bswap rBP/r13. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8682
8683
/** Opcode 0x0f 0xce - bswap rSI/r14. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8691
8692
/** Opcode 0x0f 0xcf - bswap rDI/r15. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8700
8701
/* 0x0f 0xd0..0xd5: SSE3 addsub and MMX/SSE2 shift/arith forms - stubs, not
   implemented yet. */
/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
FNIEMOP_STUB(iemOp_psrlw_Vx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
FNIEMOP_STUB(iemOp_paddq_Vx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */
8744/* Opcode 0x0f 0xd6 - invalid */
8745
8746/**
8747 * @opcode 0xd6
8748 * @oppfx 0x66
8749 * @opcpuid sse2
8750 * @opgroup og_sse2_pcksclr_datamove
8751 * @opxcpttype none
8752 * @optest op1=-1 op2=2 -> op1=2
8753 * @optest op1=0 op2=-42 -> op1=-42
8754 */
/* movq Wq,Vq (66 0f d6): store the low qword of an XMM register to an XMM
   register (zero-extending to 128 bits) or to memory. */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Destination high qword is zeroed by the ZX store. */
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8799
8800
8801/**
8802 * @opcode 0xd6
8803 * @opcodesub 11 mr/reg
8804 * @oppfx f3
8805 * @opcpuid sse2
8806 * @opgroup og_sse2_simdint_datamove
8807 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8808 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8809 */
/* movq2dq Vdq,Nq (f3 0f d6): move a qword from an MMX register into the low
   half of an XMM register, zero-extending to 128 bits; register form only. */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
        /* Touching an MMX register puts the FPU in MMX mode (FTW=0xff). */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  udf30fd6mem
     * @opcode      0xd6
     * @opcodesub   !11 mr/reg
     * @oppfx       f3
     * @opunused    intel-modrm
     * @opcpuid     sse
     * @optest      ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
8847
8848
8849/**
8850 * @opcode 0xd6
8851 * @opcodesub 11 mr/reg
8852 * @oppfx f2
8853 * @opcpuid sse2
8854 * @opgroup og_sse2_simdint_datamove
8855 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8856 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8857 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8858 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8859 * @optest op1=-42 op2=0xfedcba9876543210
8860 * -> op1=0xfedcba9876543210 ftw=0xff
8861 */
/* movdq2q Pq,Uq (f2 0f d6): move the low qword of an XMM register into an
   MMX register; register form only. */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
        /* Touching an MMX register puts the FPU in MMX mode (FTW=0xff). */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  udf20fd6mem
     * @opcode      0xd6
     * @opcodesub   !11 mr/reg
     * @oppfx       f2
     * @opunused    intel-modrm
     * @opcpuid     sse
     * @optest      ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
8899
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq (MMX form: byte sign-bit mask of an
 * MMX register into a GPR; register operand only). */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  The REX.W is first necessary when VLMAX > 256
     *        and opcode modifications are made to work with the whole width (not
     *        just 128). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Nq");
    /* Docs says register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
    {
        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /* Memory form decodes as #UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
8928
8929/** Opcode 0x66 0x0f 0xd7 - */
8930FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8931{
8932 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
8933 /** @todo testcase: Check that the instruction implicitly clears the high
8934 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
8935 * and opcode modifications are made to work with the whole width (not
8936 * just 128). */
8937 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "vpmovmskb Gd, Ux");
8938 /* Docs says register only. */
8939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8940 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8941 {
8942 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8943 IEM_MC_BEGIN(2, 0);
8944 IEM_MC_ARG(uint64_t *, pDst, 0);
8945 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8946 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8947 IEM_MC_PREPARE_SSE_USAGE();
8948 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8949 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8950 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8951 IEM_MC_ADVANCE_RIP();
8952 IEM_MC_END();
8953 return VINF_SUCCESS;
8954 }
8955 return IEMOP_RAISE_INVALID_OPCODE();
8956}
8957
/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */


/*
 * Opcodes 0x0f 0xd8 thru 0x0f 0xe6: MMX/SSE2 packed integer subtract/add with
 * unsigned saturation, min/max, logical and/andn, average, multiply-high and
 * the packed double/dword conversions.  All are still FNIEMOP_STUB
 * placeholders, i.e. not implemented by IEM yet.
 */

/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
FNIEMOP_STUB(iemOp_psubusb_Vx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
FNIEMOP_STUB(iemOp_pand_Vx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
FNIEMOP_STUB(iemOp_psraw_Vx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9067
9068
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2   ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    /* MOVNTQ: store an MMX register to memory.  IEM performs a plain 64-bit
       store; the non-temporal cache hint has no architectural effect to
       emulate. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address must be calculated before decoding completes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* FOR_CHANGE because IEM_MC_FPU_TO_MMX_MODE below alters FPU state
           (FTW, per the @optest ftw=0xff expectations above). */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    /* The register form (mod=11) is undefined and raises #UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
9115
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    /* MOVNTDQ: store an XMM register to memory.  IEM performs a plain 128-bit
       aligned store; the non-temporal cache hint has no architectural effect
       to emulate. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* FOR_READ: only the XMM register is read, memory is the destination. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* REX.R extends the XMM register number for the source operand. */
        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        /* ALIGN_SSE variant: enforces the 16-byte alignment requirement of
           this encoding (misaligned access faults). */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    /* The register form (mod=11) is undefined and raises #UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
9162
/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/*
 * Opcodes 0x0f 0xe8 thru 0x0f 0xee: MMX/SSE2 packed subtract/add with signed
 * saturation, signed min/max and logical OR.  All are still FNIEMOP_STUB
 * placeholders, i.e. not implemented by IEM yet.
 */

/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, W */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */
9215
9216
/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    /* 64-bit MMX XOR.  Decoding and operand fetching are shared with the
       other "full reg, full reg/mem" MMX instructions via the common worker;
       only the arithmetic implementation (g_iemAImpl_pxor) differs. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}
9223
/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    /* 128-bit SSE2 XOR.  Same g_iemAImpl_pxor implementation table as the MMX
       form above, dispatched through the common SSE2 full/full worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
9230
/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/*
 * Opcodes 0x0f 0xf0 thru 0x0f 0xfe: lddqu, packed shifts, multiplies,
 * multiply-add, sum-of-absolute-differences, masked moves and packed
 * add/subtract.  All are still FNIEMOP_STUB placeholders, i.e. not
 * implemented by IEM yet.
 */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */
9322
9323
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* Intel CPUs consume a ModR/M byte (and decode its effective address,
           which may itself fault) before raising \#UD; on other vendors \#UD
           is raised without fetching ModR/M. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict; /* propagate faults from the address calculation */
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
9341
9342
9343
/**
 * Two byte opcode map, first byte 0x0f.
 *
 * Four entries per opcode byte, one for each mandatory-prefix column
 * (none, 0x66, 0xf3, 0xf2) - hence the 256*4 = 1024 entry AssertCompile
 * below.  IEMOP_X4 expands to four identical entries for opcodes that
 * ignore the prefix.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*          no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */  IEMOP_X4(iemOp_Grp6),
    /* 0x01 */  IEMOP_X4(iemOp_Grp7),
    /* 0x02 */  IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */  IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */  IEMOP_X4(iemOp_Invalid),
    /* 0x05 */  IEMOP_X4(iemOp_syscall),
    /* 0x06 */  IEMOP_X4(iemOp_clts),
    /* 0x07 */  IEMOP_X4(iemOp_sysret),
    /* 0x08 */  IEMOP_X4(iemOp_invd),
    /* 0x09 */  IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */  IEMOP_X4(iemOp_Invalid),
    /* 0x0b */  IEMOP_X4(iemOp_ud2),
    /* 0x0c */  IEMOP_X4(iemOp_Invalid),
    /* 0x0d */  IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */  IEMOP_X4(iemOp_femms),
    /* 0x0f */  IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */  iemOp_movups_Vps_Wps,       iemOp_movupd_Vpd_Wpd,       iemOp_movss_Vss_Wss,        iemOp_movsd_Vsd_Wsd,
    /* 0x11 */  iemOp_movups_Wps_Vps,       iemOp_movupd_Wpd_Vpd,       iemOp_movss_Wss_Vss,        iemOp_movsd_Wsd_Vsd,
    /* 0x12 */  iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq,        iemOp_movsldup_Vdq_Wdq,     iemOp_movddup_Vdq_Wdq,
    /* 0x13 */  iemOp_movlps_Mq_Vq,         iemOp_movlpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x14 */  iemOp_unpcklps_Vx_Wx,       iemOp_unpcklpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x15 */  iemOp_unpckhps_Vx_Wx,       iemOp_unpckhpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x16 */  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq,   iemOp_InvalidNeedRM,
    /* 0x17 */  iemOp_movhps_Mq_Vq,         iemOp_movhpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x18 */  IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */  IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */  iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,
    /* 0x21 */  iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,
    /* 0x22 */  iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,
    /* 0x23 */  iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,
    /* 0x24 */  iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,
    /* 0x25 */  iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x26 */  iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,
    /* 0x27 */  iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x28 */  iemOp_movaps_Vps_Wps,       iemOp_movapd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_movaps_Wps_Vps,       iemOp_movapd_Wpd_Vpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_cvtpi2ps_Vps_Qpi,     iemOp_cvtpi2pd_Vpd_Qpi,     iemOp_cvtsi2ss_Vss_Ey,      iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */  iemOp_movntps_Mps_Vps,      iemOp_movntpd_Mpd_Vpd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2c */  iemOp_cvttps2pi_Ppi_Wps,    iemOp_cvttpd2pi_Ppi_Wpd,    iemOp_cvttss2si_Gy_Wss,     iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */  iemOp_cvtps2pi_Ppi_Wps,     iemOp_cvtpd2pi_Qpi_Wpd,     iemOp_cvtss2si_Gy_Wss,      iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_ucomiss_Vss_Wss,      iemOp_ucomisd_Vsd_Wsd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2f */  iemOp_comiss_Vss_Wss,       iemOp_comisd_Vsd_Wsd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x30 */  IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */  IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */  IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */  IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */  IEMOP_X4(iemOp_sysenter),
    /* 0x35 */  IEMOP_X4(iemOp_sysexit),
    /* 0x36 */  IEMOP_X4(iemOp_Invalid),
    /* 0x37 */  IEMOP_X4(iemOp_getsec),
    /* 0x38 */  IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */  IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */  IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */  IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */  IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */  IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */  IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */  IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */  IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */  IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */  IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */  IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */  IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */  IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */  IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */  IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */  IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */  IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */  iemOp_movmskps_Gy_Ups,      iemOp_movmskpd_Gy_Upd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x51 */  iemOp_sqrtps_Vps_Wps,       iemOp_sqrtpd_Vpd_Wpd,       iemOp_sqrtss_Vss_Wss,       iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */  iemOp_rsqrtps_Vps_Wps,      iemOp_InvalidNeedRM,        iemOp_rsqrtss_Vss_Wss,      iemOp_InvalidNeedRM,
    /* 0x53 */  iemOp_rcpps_Vps_Wps,        iemOp_InvalidNeedRM,        iemOp_rcpss_Vss_Wss,        iemOp_InvalidNeedRM,
    /* 0x54 */  iemOp_andps_Vps_Wps,        iemOp_andpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x55 */  iemOp_andnps_Vps_Wps,       iemOp_andnpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x56 */  iemOp_orps_Vps_Wps,         iemOp_orpd_Vpd_Wpd,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x57 */  iemOp_xorps_Vps_Wps,        iemOp_xorpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x58 */  iemOp_addps_Vps_Wps,        iemOp_addpd_Vpd_Wpd,        iemOp_addss_Vss_Wss,        iemOp_addsd_Vsd_Wsd,
    /* 0x59 */  iemOp_mulps_Vps_Wps,        iemOp_mulpd_Vpd_Wpd,        iemOp_mulss_Vss_Wss,        iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */  iemOp_cvtps2pd_Vpd_Wps,     iemOp_cvtpd2ps_Vps_Wpd,     iemOp_cvtss2sd_Vsd_Wss,     iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */  iemOp_cvtdq2ps_Vps_Wdq,     iemOp_cvtps2dq_Vdq_Wps,     iemOp_cvttps2dq_Vdq_Wps,    iemOp_InvalidNeedRM,
    /* 0x5c */  iemOp_subps_Vps_Wps,        iemOp_subpd_Vpd_Wpd,        iemOp_subss_Vss_Wss,        iemOp_subsd_Vsd_Wsd,
    /* 0x5d */  iemOp_minps_Vps_Wps,        iemOp_minpd_Vpd_Wpd,        iemOp_minss_Vss_Wss,        iemOp_minsd_Vsd_Wsd,
    /* 0x5e */  iemOp_divps_Vps_Wps,        iemOp_divpd_Vpd_Wpd,        iemOp_divss_Vss_Wss,        iemOp_divsd_Vsd_Wsd,
    /* 0x5f */  iemOp_maxps_Vps_Wps,        iemOp_maxpd_Vpd_Wpd,        iemOp_maxss_Vss_Wss,        iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */  iemOp_punpcklbw_Pq_Qd,      iemOp_punpcklbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd,      iemOp_punpcklwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd,      iemOp_punpckldq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x63 */  iemOp_packsswb_Pq_Qq,       iemOp_packsswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq,        iemOp_pcmpgtb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq,        iemOp_pcmpgtw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq,        iemOp_pcmpgtd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x67 */  iemOp_packuswb_Pq_Qq,       iemOp_packuswb_Vx_W,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qd,      iemOp_punpckhbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qd,      iemOp_punpckhwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qd,      iemOp_punpckhdq_Vx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6b */  iemOp_packssdw_Pq_Qd,       iemOp_packssdw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6c */  iemOp_InvalidNeedRM,        iemOp_punpcklqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6d */  iemOp_InvalidNeedRM,        iemOp_punpckhqdq_Vx_W,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6e */  iemOp_movd_q_Pd_Ey,         iemOp_movd_q_Vy_Ey,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6f */  iemOp_movq_Pq_Qq,           iemOp_movdqa_Vdq_Wdq,       iemOp_movdqu_Vdq_Wdq,       iemOp_InvalidNeedRM,

    /* 0x70 */  iemOp_pshufw_Pq_Qq_Ib,      iemOp_pshufd_Vx_Wx_Ib,      iemOp_pshufhw_Vx_Wx_Ib,     iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */  IEMOP_X4(iemOp_Grp12),
    /* 0x72 */  IEMOP_X4(iemOp_Grp13),
    /* 0x73 */  IEMOP_X4(iemOp_Grp14),
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq,        iemOp_pcmpeqb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq,        iemOp_pcmpeqw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x76 */  iemOp_pcmpeqd_Pq_Qq,        iemOp_pcmpeqd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x77 */  iemOp_emms,                 iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x78 */  iemOp_vmread_Ey_Gy,         iemOp_AmdGrp17,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x79 */  iemOp_vmwrite_Gy_Ey,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7a */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7b */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7c */  iemOp_InvalidNeedRM,        iemOp_haddpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_haddps_Vps_Wps,
    /* 0x7d */  iemOp_InvalidNeedRM,        iemOp_hsubpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_hsubps_Vps_Wps,
    /* 0x7e */  iemOp_movd_q_Ey_Pd,         iemOp_movd_q_Ey_Vy,         iemOp_movq_Vq_Wq,           iemOp_InvalidNeedRM,
    /* 0x7f */  iemOp_movq_Qq_Pq,           iemOp_movdqa_Wx_Vx,         iemOp_movdqu_Wx_Vx,         iemOp_InvalidNeedRM,

    /* 0x80 */  IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */  IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */  IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */  IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */  IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */  IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */  IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */  IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */  IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */  IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */  IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */  IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */  IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */  IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */  IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */  IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */  IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */  IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */  IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */  IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */  IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */  IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */  IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */  IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */  IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */  IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */  IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */  IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */  IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */  IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */  IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */  IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */  IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */  IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */  IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */  IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */  IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */  IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */  IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */  IEMOP_X4(iemOp_rsm),
    /* 0xab */  IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */  IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */  IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */  IEMOP_X4(iemOp_Grp15),
    /* 0xaf */  IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */  IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */  IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */  IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */  IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */  IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */  IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */  IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */  IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */  iemOp_jmpe,                 iemOp_InvalidNeedRM,        iemOp_popcnt_Gv_Ev,         iemOp_InvalidNeedRM,
    /* 0xb9 */  IEMOP_X4(iemOp_Grp10),
    /* 0xba */  IEMOP_X4(iemOp_Grp8),
    /* 0xbb */  IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */  iemOp_bsf_Gv_Ev,            iemOp_bsf_Gv_Ev,            iemOp_tzcnt_Gv_Ev,          iemOp_bsf_Gv_Ev,
    /* 0xbd */  iemOp_bsr_Gv_Ev,            iemOp_bsr_Gv_Ev,            iemOp_lzcnt_Gv_Ev,          iemOp_bsr_Gv_Ev,
    /* 0xbe */  IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */  IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */  IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */  IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib,     iemOp_cmppd_Vpd_Wpd_Ib,     iemOp_cmpss_Vss_Wss_Ib,     iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc4 */  iemOp_pinsrw_Pq_RyMw_Ib,    iemOp_pinsrw_Vdq_RyMw_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib,      iemOp_pextrw_Gd_Udq_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib,    iemOp_shufpd_Vpd_Wpd_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */  IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */  IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */  IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */  IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */  IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */  IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */  IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */  IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */  iemOp_InvalidNeedRM,        iemOp_addsubpd_Vpd_Wpd,     iemOp_InvalidNeedRM,        iemOp_addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pq_Qq,          iemOp_psrlw_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_psrld_Pq_Qq,          iemOp_psrld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq,          iemOp_psrlq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_paddq_Pq_Qq,          iemOp_paddq_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_pmullw_Pq_Qq,         iemOp_pmullw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM,        iemOp_movq_Wq_Vq,           iemOp_movq2dq_Vdq_Nq,       iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq,       iemOp_pmovmskb_Gd_Ux,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq,        iemOp_psubusb_Vx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq,        iemOp_psubusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_pminub_Pq_Qq,         iemOp_pminub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_pand_Pq_Qq,           iemOp_pand_Vx_W,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_paddusb_Pq_Qq,        iemOp_paddusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_paddusw_Pq_Qq,        iemOp_paddusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_pmaxub_Pq_Qq,         iemOp_pmaxub_Vx_W,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_pandn_Pq_Qq,          iemOp_pandn_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_pavgb_Pq_Qq,          iemOp_pavgb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_psraw_Pq_Qq,          iemOp_psraw_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_psrad_Pq_Qq,          iemOp_psrad_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq,          iemOp_pavgw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq,        iemOp_pmulhuw_Vx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq,         iemOp_pmulhw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM,        iemOp_cvttpd2dq_Vx_Wpd,     iemOp_cvtdq2pd_Vx_Wpd,      iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq,         iemOp_movntdq_Mdq_Vdq,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq,         iemOp_psubsb_Vx_W,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq,         iemOp_psubsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_pminsw_Pq_Qq,         iemOp_pminsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_por_Pq_Qq,            iemOp_por_Vx_W,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_paddsb_Pq_Qq,         iemOp_paddsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_paddsw_Pq_Qq,         iemOp_paddsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq,         iemOp_pmaxsw_Vx_W,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_pxor_Pq_Qq,           iemOp_pxor_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_lddqu_Vx_Mx,
    /* 0xf1 */  iemOp_psllw_Pq_Qq,          iemOp_psllw_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_pslld_Pq_Qq,          iemOp_pslld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_psllq_Pq_Qq,          iemOp_psllq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq,        iemOp_pmuludq_Vx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq,        iemOp_pmaddwd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq,         iemOp_psadbw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq,       iemOp_maskmovdqu_Vdq_Udq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_psubb_Pq_Qq,          iemOp_psubb_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_psubw_Pq_Qq,          iemOp_psubw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_psubd_Pq_Qq,          iemOp_psubd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_psubq_Pq_Qq,          iemOp_psubq_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_paddb_Pq_Qq,          iemOp_paddb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_paddw_Pq_Qq,          iemOp_paddw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_paddd_Pq_Qq,          iemOp_paddd_Vx_W,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
9627
9628/** @} */
9629
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette