VirtualBox: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h (r95952)

Last change in r95952, checked in by vboxsync: IEM: MMX shifts do not require SSE.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 95952 2022-07-29 16:03:56Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

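/*
 * Usage sketch: the opcode handlers in this file forward to this worker
 * together with the matching assembly-level helper.  The pxxx names below are
 * placeholders rather than real symbols; the calling pattern is the point.
 *
 *     FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC(pxxx, "pxxx Pq,Qq");
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxxx_u64);
 *     }
 */
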
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

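/*
 * Usage sketch: callers additionally pass the guest CPUID feature that gates
 * the instruction, so the worker can raise #UD when it is absent.  Placeholder
 * helper name, hypothetical wiring:
 *
 *     return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pxxx_u64,
 *                           IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
 */
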
/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

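/*
 * Usage sketch (placeholder names): the SSE2 form is wired up exactly like
 * the MMX one, only with the 128-bit helper.
 *
 *     FNIEMOP_DEF(iemOp_pxxx_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC(pxxx, "pxxx Vx,Wx");
 *         return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxxx_u128);
 *     }
 */
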
/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

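/*
 * The Opt variants suit helpers that neither consult nor update MXCSR/FXSAVE
 * state; a caller would look like (placeholder helper name):
 *
 *     return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxxx_u128);
 */
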
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0x00 /4 and /5, common worker for verr and verw. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}

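/*
 * Decode sketch (standard ModR/M encoding, nothing VBox-specific): the reg
 * field, bits 5:3 of the ModR/M byte, indexes g_apfnGroup6.  E.g. bRm = 0xd8
 * (11 011 000b) has mod=3, reg=3, rm=0 and thus dispatches to iemOp_Grp6_ltr
 * with AX as the register operand.
 */
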
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of the instruction used; if a
             hypercall isn't handled by GIM or HMSvm, an #UD is raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of the instruction used; if a
             hypercall isn't handled by GIM or HMSvm, an #UD is raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

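/*
 * Worked decode example (standard ModR/M encoding): 0x0f 0x01 0xd9 has mod=3,
 * reg=3, rm=1, so it takes the register-form switch above and lands on
 * iemOp_Grp7_Amd_vmmcall, matching the "Opcode 0x0f 0x01 0xd9" comment on
 * that handler.
 */
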
/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

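/*
 * Encoding note (standard x86 decoding): with no prefix, opcode 0x0f 0x12
 * names two instructions split purely on the ModR/M mod field.  0x0f 0x12 0xc1
 * (mod=3) is movhlps xmm0,xmm1, while 0x0f 0x12 0x01 (mod=0, 64-bit
 * addressing) is the memory form movlps xmm0,[rcx].
 */
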
1712/**
1713 * @opcode 0x12
1714 * @opcodesub !11 mr/reg
1715 * @oppfx 0x66
1716 * @opcpuid sse2
1717 * @opgroup og_sse2_pcksclr_datamove
1718 * @opxcpttype 5
1719 * @optest op1=1 op2=2 -> op1=2
1720 * @optest op1=0 op2=-42 -> op1=-42
1721 */
1722FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1723{
1724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1725 if (IEM_IS_MODRM_MEM_MODE(bRm))
1726 {
1727 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1728
1729 IEM_MC_BEGIN(0, 2);
1730 IEM_MC_LOCAL(uint64_t, uSrc);
1731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1732
1733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1735 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1736 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1737
1738 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1739 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1740
1741 IEM_MC_ADVANCE_RIP();
1742 IEM_MC_END();
1743 return VINF_SUCCESS;
1744 }
1745
1746 /**
1747 * @opdone
1748 * @opmnemonic ud660f12m3
1749 * @opcode 0x12
1750 * @opcodesub 11 mr/reg
1751 * @oppfx 0x66
1752 * @opunused immediate
1753 * @opcpuid sse
1754 * @optest ->
1755 */
1756 return IEMOP_RAISE_INVALID_OPCODE();
1757}
1758
1759
1760/**
1761 * @opcode 0x12
1762 * @oppfx 0xf3
1763 * @opcpuid sse3
1764 * @opgroup og_sse3_pcksclr_datamove
1765 * @opxcpttype 4
1766 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1767 * op1=0x00000002000000020000000100000001
1768 */
1769FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1770{
1771 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1773 if (IEM_IS_MODRM_REG_MODE(bRm))
1774 {
1775 /*
1776 * Register, register.
1777 */
1778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1779 IEM_MC_BEGIN(2, 0);
1780 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1781 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1782
1783 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1784 IEM_MC_PREPARE_SSE_USAGE();
1785
1786 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1787 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1788 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1789
1790 IEM_MC_ADVANCE_RIP();
1791 IEM_MC_END();
1792 }
1793 else
1794 {
1795 /*
1796 * Register, memory.
1797 */
1798 IEM_MC_BEGIN(2, 2);
1799 IEM_MC_LOCAL(RTUINT128U, uSrc);
1800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1801 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1802 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1803
1804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1806 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1807 IEM_MC_PREPARE_SSE_USAGE();
1808
1809 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1810 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1811 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1812
1813 IEM_MC_ADVANCE_RIP();
1814 IEM_MC_END();
1815 }
1816 return VINF_SUCCESS;
1817}
1818
1819
1820/**
1821 * @opcode 0x12
1822 * @oppfx 0xf2
1823 * @opcpuid sse3
1824 * @opgroup og_sse3_pcksclr_datamove
1825 * @opxcpttype 5
1826 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1827 * op1=0x22222222111111112222222211111111
1828 */
1829FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1830{
1831 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1833 if (IEM_IS_MODRM_REG_MODE(bRm))
1834 {
1835 /*
1836 * Register, register.
1837 */
1838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1839 IEM_MC_BEGIN(2, 0);
1840 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1841 IEM_MC_ARG(uint64_t, uSrc, 1);
1842
1843 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1844 IEM_MC_PREPARE_SSE_USAGE();
1845
1846 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1847 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1848 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1849
1850 IEM_MC_ADVANCE_RIP();
1851 IEM_MC_END();
1852 }
1853 else
1854 {
1855 /*
1856 * Register, memory.
1857 */
1858 IEM_MC_BEGIN(2, 2);
1859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1860 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1861 IEM_MC_ARG(uint64_t, uSrc, 1);
1862
1863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1865 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1866 IEM_MC_PREPARE_SSE_USAGE();
1867
1868 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1869 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1870 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1871
1872 IEM_MC_ADVANCE_RIP();
1873 IEM_MC_END();
1874 }
1875 return VINF_SUCCESS;
1876}
1877
1878
1879/**
1880 * @opcode 0x13
1881 * @opcodesub !11 mr/reg
1882 * @oppfx none
1883 * @opcpuid sse
1884 * @opgroup og_sse_simdfp_datamove
1885 * @opxcpttype 5
1886 * @optest op1=1 op2=2 -> op1=2
1887 * @optest op1=0 op2=-42 -> op1=-42
1888 */
1889FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1890{
1891 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1892 if (IEM_IS_MODRM_MEM_MODE(bRm))
1893 {
1894 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1895
1896 IEM_MC_BEGIN(0, 2);
1897 IEM_MC_LOCAL(uint64_t, uSrc);
1898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1899
1900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1902 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1903 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1904
1905 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1906 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1907
1908 IEM_MC_ADVANCE_RIP();
1909 IEM_MC_END();
1910 return VINF_SUCCESS;
1911 }
1912
1913 /**
1914 * @opdone
1915 * @opmnemonic ud0f13m3
1916 * @opcode 0x13
1917 * @opcodesub 11 mr/reg
1918 * @oppfx none
1919 * @opunused immediate
1920 * @opcpuid sse
1921 * @optest ->
1922 */
1923 return IEMOP_RAISE_INVALID_OPCODE();
1924}
1925
1926
1927/**
1928 * @opcode 0x13
1929 * @opcodesub !11 mr/reg
1930 * @oppfx 0x66
1931 * @opcpuid sse2
1932 * @opgroup og_sse2_pcksclr_datamove
1933 * @opxcpttype 5
1934 * @optest op1=1 op2=2 -> op1=2
1935 * @optest op1=0 op2=-42 -> op1=-42
1936 */
1937FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1938{
1939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1940 if (IEM_IS_MODRM_MEM_MODE(bRm))
1941 {
1942 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1943 IEM_MC_BEGIN(0, 2);
1944 IEM_MC_LOCAL(uint64_t, uSrc);
1945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1946
1947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1949 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1950 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1951
1952 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1953 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1954
1955 IEM_MC_ADVANCE_RIP();
1956 IEM_MC_END();
1957 return VINF_SUCCESS;
1958 }
1959
1960 /**
1961 * @opdone
1962 * @opmnemonic ud660f13m3
1963 * @opcode 0x13
1964 * @opcodesub 11 mr/reg
1965 * @oppfx 0x66
1966 * @opunused immediate
1967 * @opcpuid sse
1968 * @optest ->
1969 */
1970 return IEMOP_RAISE_INVALID_OPCODE();
1971}
1972
1973
1974/**
1975 * @opmnemonic udf30f13
1976 * @opcode 0x13
1977 * @oppfx 0xf3
1978 * @opunused intel-modrm
1979 * @opcpuid sse
1980 * @optest ->
1981 * @opdone
1982 */
1983
1984/**
1985 * @opmnemonic udf20f13
1986 * @opcode 0x13
1987 * @oppfx 0xf2
1988 * @opunused intel-modrm
1989 * @opcpuid sse
1990 * @optest ->
1991 * @opdone
1992 */
1993
1994/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
1995FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1996/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1997FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1998
1999/**
2000 * @opdone
2001 * @opmnemonic udf30f14
2002 * @opcode 0x14
2003 * @oppfx 0xf3
2004 * @opunused intel-modrm
2005 * @opcpuid sse
2006 * @optest ->
2007 * @opdone
2008 */
2009
2010/**
2011 * @opmnemonic udf20f14
2012 * @opcode 0x14
2013 * @oppfx 0xf2
2014 * @opunused intel-modrm
2015 * @opcpuid sse
2016 * @optest ->
2017 * @opdone
2018 */
2019
2020/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2021FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
2022/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2023FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
2024/* Opcode 0xf3 0x0f 0x15 - invalid */
2025/* Opcode 0xf2 0x0f 0x15 - invalid */
2026
2027/**
2028 * @opdone
2029 * @opmnemonic udf30f15
2030 * @opcode 0x15
2031 * @oppfx 0xf3
2032 * @opunused intel-modrm
2033 * @opcpuid sse
2034 * @optest ->
2035 * @opdone
2036 */
2037
2038/**
2039 * @opmnemonic udf20f15
2040 * @opcode 0x15
2041 * @oppfx 0xf2
2042 * @opunused intel-modrm
2043 * @opcpuid sse
2044 * @optest ->
2045 * @opdone
2046 */
2047
2048FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2049{
2050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2051 if (IEM_IS_MODRM_REG_MODE(bRm))
2052 {
2053 /**
2054 * @opcode 0x16
2055 * @opcodesub 11 mr/reg
2056 * @oppfx none
2057 * @opcpuid sse
2058 * @opgroup og_sse_simdfp_datamove
2059 * @opxcpttype 5
2060 * @optest op1=1 op2=2 -> op1=2
2061 * @optest op1=0 op2=-42 -> op1=-42
2062 */
2063 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2064
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 IEM_MC_BEGIN(0, 1);
2067 IEM_MC_LOCAL(uint64_t, uSrc);
2068
2069 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2070 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2071 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2072 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2073
2074 IEM_MC_ADVANCE_RIP();
2075 IEM_MC_END();
2076 }
2077 else
2078 {
2079 /**
2080 * @opdone
2081 * @opcode 0x16
2082 * @opcodesub !11 mr/reg
2083 * @oppfx none
2084 * @opcpuid sse
2085 * @opgroup og_sse_simdfp_datamove
2086 * @opxcpttype 5
2087 * @optest op1=1 op2=2 -> op1=2
2088 * @optest op1=0 op2=-42 -> op1=-42
2089 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2090 */
2091 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2092
2093 IEM_MC_BEGIN(0, 2);
2094 IEM_MC_LOCAL(uint64_t, uSrc);
2095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2096
2097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2099 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2100 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2101
2102 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2103 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2104
2105 IEM_MC_ADVANCE_RIP();
2106 IEM_MC_END();
2107 }
2108 return VINF_SUCCESS;
2109}
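
/*
 * Illustrative sketch only (hypothetical helper): both encodings above write
 * the high qword of the destination and leave the low qword alone - movlhps
 * takes the source register's low qword, movhps takes a qword from memory.
 */
#if 0 /* exposition, not built */
static void iemExampleStoreXregHiU64(PRTUINT128U puDst, uint64_t uSrc)
{
    puDst->au64[1] = uSrc; /* au64[0] is untouched */
}
#endif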
2110
2111
2112/**
2113 * @opcode 0x16
2114 * @opcodesub !11 mr/reg
2115 * @oppfx 0x66
2116 * @opcpuid sse2
2117 * @opgroup og_sse2_pcksclr_datamove
2118 * @opxcpttype 5
2119 * @optest op1=1 op2=2 -> op1=2
2120 * @optest op1=0 op2=-42 -> op1=-42
2121 */
2122FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2123{
2124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2125 if (IEM_IS_MODRM_MEM_MODE(bRm))
2126 {
2127 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2128 IEM_MC_BEGIN(0, 2);
2129 IEM_MC_LOCAL(uint64_t, uSrc);
2130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2131
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2134 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2136
2137 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2138 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2139
2140 IEM_MC_ADVANCE_RIP();
2141 IEM_MC_END();
2142 return VINF_SUCCESS;
2143 }
2144
2145 /**
2146 * @opdone
2147 * @opmnemonic ud660f16m3
2148 * @opcode 0x16
2149 * @opcodesub 11 mr/reg
2150 * @oppfx 0x66
2151 * @opunused immediate
2152 * @opcpuid sse
2153 * @optest ->
2154 */
2155 return IEMOP_RAISE_INVALID_OPCODE();
2156}
2157
2158
2159/**
2160 * @opcode 0x16
2161 * @oppfx 0xf3
2162 * @opcpuid sse3
2163 * @opgroup og_sse3_pcksclr_datamove
2164 * @opxcpttype 4
2165 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2166 * op1=0x00000002000000020000000100000001
2167 */
2168FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2169{
2170 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2172 if (IEM_IS_MODRM_REG_MODE(bRm))
2173 {
2174 /*
2175 * Register, register.
2176 */
2177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2178 IEM_MC_BEGIN(2, 0);
2179 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2180 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2181
2182 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2183 IEM_MC_PREPARE_SSE_USAGE();
2184
2185 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2186 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2187 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2188
2189 IEM_MC_ADVANCE_RIP();
2190 IEM_MC_END();
2191 }
2192 else
2193 {
2194 /*
2195 * Register, memory.
2196 */
2197 IEM_MC_BEGIN(2, 2);
2198 IEM_MC_LOCAL(RTUINT128U, uSrc);
2199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2200 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2201 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2202
2203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2205 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2206 IEM_MC_PREPARE_SSE_USAGE();
2207
2208 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2209 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2210 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2211
2212 IEM_MC_ADVANCE_RIP();
2213 IEM_MC_END();
2214 }
2215 return VINF_SUCCESS;
2216}
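
/*
 * Illustrative sketch only (hypothetical helper): movshdup is the odd-dword
 * counterpart of movsldup, duplicating the high dword of each pair, as the
 * @optest above demonstrates.
 */
#if 0 /* exposition, not built */
static void iemExampleMovShDup(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au32[0] = puSrc->au32[1];
    puDst->au32[1] = puSrc->au32[1];
    puDst->au32[2] = puSrc->au32[3];
    puDst->au32[3] = puSrc->au32[3];
}
#endif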
2217
2218/**
2219 * @opdone
2220 * @opmnemonic udf30f16
2221 * @opcode 0x16
2222 * @oppfx 0xf2
2223 * @opunused intel-modrm
2224 * @opcpuid sse
2225 * @optest ->
2226 * @opdone
2227 */
2228
2229
2230/**
2231 * @opcode 0x17
2232 * @opcodesub !11 mr/reg
2233 * @oppfx none
2234 * @opcpuid sse
2235 * @opgroup og_sse_simdfp_datamove
2236 * @opxcpttype 5
2237 * @optest op1=1 op2=2 -> op1=2
2238 * @optest op1=0 op2=-42 -> op1=-42
2239 */
2240FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2241{
2242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2243 if (IEM_IS_MODRM_MEM_MODE(bRm))
2244 {
2245 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2246
2247 IEM_MC_BEGIN(0, 2);
2248 IEM_MC_LOCAL(uint64_t, uSrc);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250
2251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2253 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2254 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2255
2256 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2257 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2258
2259 IEM_MC_ADVANCE_RIP();
2260 IEM_MC_END();
2261 return VINF_SUCCESS;
2262 }
2263
2264 /**
2265 * @opdone
2266 * @opmnemonic ud0f17m3
2267 * @opcode 0x17
2268 * @opcodesub 11 mr/reg
2269 * @oppfx none
2270 * @opunused immediate
2271 * @opcpuid sse
2272 * @optest ->
2273 */
2274 return IEMOP_RAISE_INVALID_OPCODE();
2275}
2276
2277
2278/**
2279 * @opcode 0x17
2280 * @opcodesub !11 mr/reg
2281 * @oppfx 0x66
2282 * @opcpuid sse2
2283 * @opgroup og_sse2_pcksclr_datamove
2284 * @opxcpttype 5
2285 * @optest op1=1 op2=2 -> op1=2
2286 * @optest op1=0 op2=-42 -> op1=-42
2287 */
2288FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2289{
2290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2291 if (IEM_IS_MODRM_MEM_MODE(bRm))
2292 {
2293 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2294
2295 IEM_MC_BEGIN(0, 2);
2296 IEM_MC_LOCAL(uint64_t, uSrc);
2297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2298
2299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2301 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2302 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2303
2304 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2305 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2306
2307 IEM_MC_ADVANCE_RIP();
2308 IEM_MC_END();
2309 return VINF_SUCCESS;
2310 }
2311
2312 /**
2313 * @opdone
2314 * @opmnemonic ud660f17m3
2315 * @opcode 0x17
2316 * @opcodesub 11 mr/reg
2317 * @oppfx 0x66
2318 * @opunused immediate
2319 * @opcpuid sse
2320 * @optest ->
2321 */
2322 return IEMOP_RAISE_INVALID_OPCODE();
2323}
2324
2325
2326/**
2327 * @opdone
2328 * @opmnemonic udf30f17
2329 * @opcode 0x17
2330 * @oppfx 0xf3
2331 * @opunused intel-modrm
2332 * @opcpuid sse
2333 * @optest ->
2334 * @opdone
2335 */
2336
2337/**
2338 * @opmnemonic udf20f17
2339 * @opcode 0x17
2340 * @oppfx 0xf2
2341 * @opunused intel-modrm
2342 * @opcpuid sse
2343 * @optest ->
2344 * @opdone
2345 */
2346
2347
2348/** Opcode 0x0f 0x18. */
2349FNIEMOP_DEF(iemOp_prefetch_Grp16)
2350{
2351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2352 if (IEM_IS_MODRM_MEM_MODE(bRm))
2353 {
2354 switch (IEM_GET_MODRM_REG_8(bRm))
2355 {
2356 case 4: /* Aliased to /0 for the time being according to AMD. */
2357 case 5: /* Aliased to /0 for the time being according to AMD. */
2358 case 6: /* Aliased to /0 for the time being according to AMD. */
2359 case 7: /* Aliased to /0 for the time being according to AMD. */
2360 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2361 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2362 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2363 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2365 }
2366
2367 IEM_MC_BEGIN(0, 1);
2368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2371 /* Currently a NOP. */
2372 NOREF(GCPtrEffSrc);
2373 IEM_MC_ADVANCE_RIP();
2374 IEM_MC_END();
2375 return VINF_SUCCESS;
2376 }
2377
2378 return IEMOP_RAISE_INVALID_OPCODE();
2379}
2380
2381
2382/** Opcode 0x0f 0x19..0x1f. */
2383FNIEMOP_DEF(iemOp_nop_Ev)
2384{
2385 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2387 if (IEM_IS_MODRM_REG_MODE(bRm))
2388 {
2389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2390 IEM_MC_BEGIN(0, 0);
2391 IEM_MC_ADVANCE_RIP();
2392 IEM_MC_END();
2393 }
2394 else
2395 {
2396 IEM_MC_BEGIN(0, 1);
2397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2400 /* Currently a NOP. */
2401 NOREF(GCPtrEffSrc);
2402 IEM_MC_ADVANCE_RIP();
2403 IEM_MC_END();
2404 }
2405 return VINF_SUCCESS;
2406}
2407
2408
2409/** Opcode 0x0f 0x20. */
2410FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2411{
2412 /* mod is ignored, as are operand size overrides. */
2413 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2414 IEMOP_HLP_MIN_386();
2415 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2416 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2417 else
2418 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2419
2420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2421 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
2422 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2423 {
2424 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2425 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2426 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2427 iCrReg |= 8;
2428 }
2429 switch (iCrReg)
2430 {
2431 case 0: case 2: case 3: case 4: case 8:
2432 break;
2433 default:
2434 return IEMOP_RAISE_INVALID_OPCODE();
2435 }
2436 IEMOP_HLP_DONE_DECODING();
2437
2438 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
2439}
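
/*
 * Illustrative sketch only (hypothetical helper): the lock-prefix handling
 * above implements the AMD alternative encoding where LOCK selects the high
 * control register bank, so e.g. f0 0f 20 c0 (lock mov eax, cr0) is decoded
 * as mov eax, cr8 when the CPU reports the feature.
 */
#if 0 /* exposition, not built */
static uint8_t iemExampleEffectiveCrReg(uint8_t iCrReg, bool fLockPrefix)
{
    return fLockPrefix ? (uint8_t)(iCrReg | 8) : iCrReg;
}
#endif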
2440
2441
2442/** Opcode 0x0f 0x21. */
2443FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2444{
2445 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2446 IEMOP_HLP_MIN_386();
2447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2449 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2450 return IEMOP_RAISE_INVALID_OPCODE();
2451 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2452 IEM_GET_MODRM_RM(pVCpu, bRm),
2453 IEM_GET_MODRM_REG_8(bRm));
2454}
2455
2456
2457/** Opcode 0x0f 0x22. */
2458FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2459{
2460 /* mod is ignored, as are operand size overrides. */
2461 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2462 IEMOP_HLP_MIN_386();
2463 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2464 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2465 else
2466 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2467
2468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2469 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
2470 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2471 {
2472 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2473 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2474 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2475 iCrReg |= 8;
2476 }
2477 switch (iCrReg)
2478 {
2479 case 0: case 2: case 3: case 4: case 8:
2480 break;
2481 default:
2482 return IEMOP_RAISE_INVALID_OPCODE();
2483 }
2484 IEMOP_HLP_DONE_DECODING();
2485
2486 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
2487}
2488
2489
2490/** Opcode 0x0f 0x23. */
2491FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2492{
2493 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2494 IEMOP_HLP_MIN_386();
2495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2497 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2498 return IEMOP_RAISE_INVALID_OPCODE();
2499 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2500 IEM_GET_MODRM_REG_8(bRm),
2501 IEM_GET_MODRM_RM(pVCpu, bRm));
2502}
2503
2504
2505/** Opcode 0x0f 0x24. */
2506FNIEMOP_DEF(iemOp_mov_Rd_Td)
2507{
2508 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2509 IEMOP_HLP_MIN_386();
2510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2512 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2513 return IEMOP_RAISE_INVALID_OPCODE();
2514 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
2515 IEM_GET_MODRM_RM(pVCpu, bRm),
2516 IEM_GET_MODRM_REG_8(bRm));
2517}
2518
2519
2520/** Opcode 0x0f 0x26. */
2521FNIEMOP_DEF(iemOp_mov_Td_Rd)
2522{
2523 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2524 IEMOP_HLP_MIN_386();
2525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2527 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2528 return IEMOP_RAISE_INVALID_OPCODE();
2529 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
2530 IEM_GET_MODRM_REG_8(bRm),
2531 IEM_GET_MODRM_RM(pVCpu, bRm));
2532}
2533
2534
2535/**
2536 * @opcode 0x28
2537 * @oppfx none
2538 * @opcpuid sse
2539 * @opgroup og_sse_simdfp_datamove
2540 * @opxcpttype 1
2541 * @optest op1=1 op2=2 -> op1=2
2542 * @optest op1=0 op2=-42 -> op1=-42
2543 */
2544FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2545{
2546 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2548 if (IEM_IS_MODRM_REG_MODE(bRm))
2549 {
2550 /*
2551 * Register, register.
2552 */
2553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2554 IEM_MC_BEGIN(0, 0);
2555 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2556 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2557 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2558 IEM_GET_MODRM_RM(pVCpu, bRm));
2559 IEM_MC_ADVANCE_RIP();
2560 IEM_MC_END();
2561 }
2562 else
2563 {
2564 /*
2565 * Register, memory.
2566 */
2567 IEM_MC_BEGIN(0, 2);
2568 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2570
2571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2573 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2574 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2575
2576 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2577 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2578
2579 IEM_MC_ADVANCE_RIP();
2580 IEM_MC_END();
2581 }
2582 return VINF_SUCCESS;
2583}
2584
2585/**
2586 * @opcode 0x28
2587 * @oppfx 66
2588 * @opcpuid sse2
2589 * @opgroup og_sse2_pcksclr_datamove
2590 * @opxcpttype 1
2591 * @optest op1=1 op2=2 -> op1=2
2592 * @optest op1=0 op2=-42 -> op1=-42
2593 */
2594FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2595{
2596 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2598 if (IEM_IS_MODRM_REG_MODE(bRm))
2599 {
2600 /*
2601 * Register, register.
2602 */
2603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2604 IEM_MC_BEGIN(0, 0);
2605 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2607 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2608 IEM_GET_MODRM_RM(pVCpu, bRm));
2609 IEM_MC_ADVANCE_RIP();
2610 IEM_MC_END();
2611 }
2612 else
2613 {
2614 /*
2615 * Register, memory.
2616 */
2617 IEM_MC_BEGIN(0, 2);
2618 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2620
2621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2623 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2624 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2625
2626 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2627 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2628
2629 IEM_MC_ADVANCE_RIP();
2630 IEM_MC_END();
2631 }
2632 return VINF_SUCCESS;
2633}
2634
2635/* Opcode 0xf3 0x0f 0x28 - invalid */
2636/* Opcode 0xf2 0x0f 0x28 - invalid */
2637
2638/**
2639 * @opcode 0x29
2640 * @oppfx none
2641 * @opcpuid sse
2642 * @opgroup og_sse_simdfp_datamove
2643 * @opxcpttype 1
2644 * @optest op1=1 op2=2 -> op1=2
2645 * @optest op1=0 op2=-42 -> op1=-42
2646 */
2647FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2648{
2649 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2651 if (IEM_IS_MODRM_REG_MODE(bRm))
2652 {
2653 /*
2654 * Register, register.
2655 */
2656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2657 IEM_MC_BEGIN(0, 0);
2658 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2659 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2660 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2661 IEM_GET_MODRM_REG(pVCpu, bRm));
2662 IEM_MC_ADVANCE_RIP();
2663 IEM_MC_END();
2664 }
2665 else
2666 {
2667 /*
2668 * Memory, register.
2669 */
2670 IEM_MC_BEGIN(0, 2);
2671 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2673
2674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2678
2679 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2680 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2681
2682 IEM_MC_ADVANCE_RIP();
2683 IEM_MC_END();
2684 }
2685 return VINF_SUCCESS;
2686}
2687
2688/**
2689 * @opcode 0x29
2690 * @oppfx 66
2691 * @opcpuid sse2
2692 * @opgroup og_sse2_pcksclr_datamove
2693 * @opxcpttype 1
2694 * @optest op1=1 op2=2 -> op1=2
2695 * @optest op1=0 op2=-42 -> op1=-42
2696 */
2697FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2698{
2699 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2701 if (IEM_IS_MODRM_REG_MODE(bRm))
2702 {
2703 /*
2704 * Register, register.
2705 */
2706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2707 IEM_MC_BEGIN(0, 0);
2708 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2709 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2710 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2711 IEM_GET_MODRM_REG(pVCpu, bRm));
2712 IEM_MC_ADVANCE_RIP();
2713 IEM_MC_END();
2714 }
2715 else
2716 {
2717 /*
2718 * Memory, register.
2719 */
2720 IEM_MC_BEGIN(0, 2);
2721 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2723
2724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2726 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2727 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2728
2729 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2730 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2731
2732 IEM_MC_ADVANCE_RIP();
2733 IEM_MC_END();
2734 }
2735 return VINF_SUCCESS;
2736}
2737
2738/* Opcode 0xf3 0x0f 0x29 - invalid */
2739/* Opcode 0xf2 0x0f 0x29 - invalid */
2740
2741
2742/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2743FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2744/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2745FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2746/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2747FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2748/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2749FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2750
2751
2752/**
2753 * @opcode 0x2b
2754 * @opcodesub !11 mr/reg
2755 * @oppfx none
2756 * @opcpuid sse
2757 * @opgroup og_sse1_cachect
2758 * @opxcpttype 1
2759 * @optest op1=1 op2=2 -> op1=2
2760 * @optest op1=0 op2=-42 -> op1=-42
2761 */
2762FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2763{
2764 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2766 if (IEM_IS_MODRM_MEM_MODE(bRm))
2767 {
2768 /*
2769 * memory, register.
2770 */
2771 IEM_MC_BEGIN(0, 2);
2772 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2774
2775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2777 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2778 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2779
2780 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2781 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2782
2783 IEM_MC_ADVANCE_RIP();
2784 IEM_MC_END();
2785 }
2786 /* The register, register encoding is invalid. */
2787 else
2788 return IEMOP_RAISE_INVALID_OPCODE();
2789 return VINF_SUCCESS;
2790}
2791
2792/**
2793 * @opcode 0x2b
2794 * @opcodesub !11 mr/reg
2795 * @oppfx 0x66
2796 * @opcpuid sse2
2797 * @opgroup og_sse2_cachect
2798 * @opxcpttype 1
2799 * @optest op1=1 op2=2 -> op1=2
2800 * @optest op1=0 op2=-42 -> op1=-42
2801 */
2802FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2803{
2804 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2805 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2806 if (IEM_IS_MODRM_MEM_MODE(bRm))
2807 {
2808 /*
2809 * memory, register.
2810 */
2811 IEM_MC_BEGIN(0, 2);
2812 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2814
2815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2817 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2818 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2819
2820 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2821 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2822
2823 IEM_MC_ADVANCE_RIP();
2824 IEM_MC_END();
2825 }
2826 /* The register, register encoding is invalid. */
2827 else
2828 return IEMOP_RAISE_INVALID_OPCODE();
2829 return VINF_SUCCESS;
2830}
2831/* Opcode 0xf3 0x0f 0x2b - invalid */
2832/* Opcode 0xf2 0x0f 0x2b - invalid */
2833
2834
2835/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2836FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2837/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2838FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2839/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2840FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2841/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2842FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2843
2844/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2845FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2846/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2847FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2848/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2849FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2850/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2851FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2852
2853/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2854FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2855/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2856FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2857/* Opcode 0xf3 0x0f 0x2e - invalid */
2858/* Opcode 0xf2 0x0f 0x2e - invalid */
2859
2860/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2861FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2862/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2863FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2864/* Opcode 0xf3 0x0f 0x2f - invalid */
2865/* Opcode 0xf2 0x0f 0x2f - invalid */
2866
2867/** Opcode 0x0f 0x30. */
2868FNIEMOP_DEF(iemOp_wrmsr)
2869{
2870 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2872 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2873}
2874
2875
2876/** Opcode 0x0f 0x31. */
2877FNIEMOP_DEF(iemOp_rdtsc)
2878{
2879 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2881 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2882}
2883
2884
2885/** Opcode 0x0f 0x32. */
2886FNIEMOP_DEF(iemOp_rdmsr)
2887{
2888 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2890 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2891}
2892
2893
2894/** Opcode 0x0f 0x33. */
2895FNIEMOP_DEF(iemOp_rdpmc)
2896{
2897 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2899 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2900}
2901
2902
2903/** Opcode 0x0f 0x34. */
2904FNIEMOP_DEF(iemOp_sysenter)
2905{
2906 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
2907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2908 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
2909}
2910
2911/** Opcode 0x0f 0x35. */
2912FNIEMOP_DEF(iemOp_sysexit)
2913{
2914 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
2915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2916 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
2917}
2918
2919/** Opcode 0x0f 0x37. */
2920FNIEMOP_STUB(iemOp_getsec);
2921
2922
2923/** Opcode 0x0f 0x38. */
2924FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2925{
2926#ifdef IEM_WITH_THREE_0F_38
2927 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2928 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2929#else
2930 IEMOP_BITCH_ABOUT_STUB();
2931 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2932#endif
2933}
2934
2935
2936/** Opcode 0x0f 0x3a. */
2937FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2938{
2939#ifdef IEM_WITH_THREE_0F_3A
2940 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2941 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2942#else
2943 IEMOP_BITCH_ABOUT_STUB();
2944 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2945#endif
2946}
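
/*
 * Illustrative sketch only (hypothetical helper): the three-byte tables used
 * above keep four entries per opcode byte, one per mandatory-prefix group
 * (none, 0x66, 0xf3, 0xf2), hence the 'b * 4 + idxPrefix' indexing.
 */
#if 0 /* exposition, not built */
static uintptr_t iemExampleThreeByteTableIndex(uint8_t bOpcode, uint8_t idxPrefix)
{
    Assert(idxPrefix < 4); /* 0=none, 1=0x66, 2=0xf3, 3=0xf2 */
    return (uintptr_t)bOpcode * 4 + idxPrefix;
}
#endif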
2947
2948
2949/**
2950 * Implements a conditional move.
2951 *
2952 * Wish there were an obvious way to do this where we could share code and
2953 * reduce bloat.
2954 *
2955 * @param a_Cnd The conditional "microcode" operation.
2956 */
2957#define CMOV_X(a_Cnd) \
2958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2959 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2960 { \
2961 switch (pVCpu->iem.s.enmEffOpSize) \
2962 { \
2963 case IEMMODE_16BIT: \
2964 IEM_MC_BEGIN(0, 1); \
2965 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2966 a_Cnd { \
2967 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2968 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
2969 } IEM_MC_ENDIF(); \
2970 IEM_MC_ADVANCE_RIP(); \
2971 IEM_MC_END(); \
2972 return VINF_SUCCESS; \
2973 \
2974 case IEMMODE_32BIT: \
2975 IEM_MC_BEGIN(0, 1); \
2976 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2977 a_Cnd { \
2978 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2979 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
2980 } IEM_MC_ELSE() { \
2981 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2982 } IEM_MC_ENDIF(); \
2983 IEM_MC_ADVANCE_RIP(); \
2984 IEM_MC_END(); \
2985 return VINF_SUCCESS; \
2986 \
2987 case IEMMODE_64BIT: \
2988 IEM_MC_BEGIN(0, 1); \
2989 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2990 a_Cnd { \
2991 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2992 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
2993 } IEM_MC_ENDIF(); \
2994 IEM_MC_ADVANCE_RIP(); \
2995 IEM_MC_END(); \
2996 return VINF_SUCCESS; \
2997 \
2998 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2999 } \
3000 } \
3001 else \
3002 { \
3003 switch (pVCpu->iem.s.enmEffOpSize) \
3004 { \
3005 case IEMMODE_16BIT: \
3006 IEM_MC_BEGIN(0, 2); \
3007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3008 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3010 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3011 a_Cnd { \
3012 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3013 } IEM_MC_ENDIF(); \
3014 IEM_MC_ADVANCE_RIP(); \
3015 IEM_MC_END(); \
3016 return VINF_SUCCESS; \
3017 \
3018 case IEMMODE_32BIT: \
3019 IEM_MC_BEGIN(0, 2); \
3020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3021 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3023 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3024 a_Cnd { \
3025 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3026 } IEM_MC_ELSE() { \
3027 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3028 } IEM_MC_ENDIF(); \
3029 IEM_MC_ADVANCE_RIP(); \
3030 IEM_MC_END(); \
3031 return VINF_SUCCESS; \
3032 \
3033 case IEMMODE_64BIT: \
3034 IEM_MC_BEGIN(0, 2); \
3035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3036 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3038 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3039 a_Cnd { \
3040 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3041 } IEM_MC_ENDIF(); \
3042 IEM_MC_ADVANCE_RIP(); \
3043 IEM_MC_END(); \
3044 return VINF_SUCCESS; \
3045 \
3046 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3047 } \
3048 } do {} while (0)
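
/*
 * Illustrative sketch only (hypothetical helper): note the IEM_MC_ELSE branch
 * in the 32-bit cases above - in 64-bit mode a 32-bit CMOVcc zero-extends the
 * destination even when the condition is false, so the register is written
 * either way.  The net effect:
 */
#if 0 /* exposition, not built */
static uint64_t iemExampleCmov32(uint64_t uDst, uint32_t uSrc, bool fCondition)
{
    return fCondition ? uSrc : (uint32_t)uDst; /* high 32 bits cleared in both cases */
}
#endif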
3049
3050
3051
3052/** Opcode 0x0f 0x40. */
3053FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
3054{
3055 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
3056 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
3057}
3058
3059
3060/** Opcode 0x0f 0x41. */
3061FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
3062{
3063 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
3064 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
3065}
3066
3067
3068/** Opcode 0x0f 0x42. */
3069FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
3070{
3071 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
3072 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
3073}
3074
3075
3076/** Opcode 0x0f 0x43. */
3077FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
3078{
3079 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
3080 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
3081}
3082
3083
3084/** Opcode 0x0f 0x44. */
3085FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
3086{
3087 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
3088 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
3089}
3090
3091
3092/** Opcode 0x0f 0x45. */
3093FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
3094{
3095 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
3096 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
3097}
3098
3099
3100/** Opcode 0x0f 0x46. */
3101FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
3102{
3103 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
3104 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3105}
3106
3107
3108/** Opcode 0x0f 0x47. */
3109FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
3110{
3111 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
3112 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3113}
3114
3115
3116/** Opcode 0x0f 0x48. */
3117FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
3118{
3119 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
3120 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
3121}
3122
3123
3124/** Opcode 0x0f 0x49. */
3125FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
3126{
3127 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
3128 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
3129}
3130
3131
3132/** Opcode 0x0f 0x4a. */
3133FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
3134{
3135 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
3136 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
3137}
3138
3139
3140/** Opcode 0x0f 0x4b. */
3141FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
3142{
3143 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
3144 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
3145}
3146
3147
3148/** Opcode 0x0f 0x4c. */
3149FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
3150{
3151 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
3152 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
3153}
3154
3155
3156/** Opcode 0x0f 0x4d. */
3157FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
3158{
3159 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
3160 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
3161}
3162
3163
3164/** Opcode 0x0f 0x4e. */
3165FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
3166{
3167 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
3168 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3169}
3170
3171
3172/** Opcode 0x0f 0x4f. */
3173FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
3174{
3175 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
3176 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3177}
3178
3179#undef CMOV_X
3180
3181/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
3182FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
3183/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
3184FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
3185/* Opcode 0xf3 0x0f 0x50 - invalid */
3186/* Opcode 0xf2 0x0f 0x50 - invalid */
3187
3188/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
3189FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
3190/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
3191FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
3192/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
3193FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
3194/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
3195FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
3196
3197/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
3198FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
3199/* Opcode 0x66 0x0f 0x52 - invalid */
3200/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
3201FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
3202/* Opcode 0xf2 0x0f 0x52 - invalid */
3203
3204/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
3205FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
3206/* Opcode 0x66 0x0f 0x53 - invalid */
3207/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
3208FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
3209/* Opcode 0xf2 0x0f 0x53 - invalid */
3210
3211
3212/** Opcode 0x0f 0x54 - andps Vps, Wps */
3213FNIEMOP_DEF(iemOp_andps_Vps_Wps)
3214{
3215 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3216 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3217}
3218
3219
3220/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
3221FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
3222{
3223 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3224 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3225}
3226
3227
3228/* Opcode 0xf3 0x0f 0x54 - invalid */
3229/* Opcode 0xf2 0x0f 0x54 - invalid */
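
/*
 * Illustrative sketch only (hypothetical rendering): andps/andpd above can
 * reuse the integer pand worker because bitwise AND is lane-type agnostic;
 * this is roughly what iemAImpl_pand_u128 computes.
 */
#if 0 /* exposition, not built */
static void iemExamplePAndU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] &= puSrc->au64[0];
    puDst->au64[1] &= puSrc->au64[1];
}
#endif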
3230
3231
3232/** Opcode 0x0f 0x55 - andnps Vps, Wps */
3233FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
3234{
3235 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3236 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3237}
3238
3239
3240/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
3241FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
3242{
3243 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3244 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3245}
3246
3247
3248/* Opcode 0xf3 0x0f 0x55 - invalid */
3249/* Opcode 0xf2 0x0f 0x55 - invalid */
3250
3251
3252/** Opcode 0x0f 0x56 - orps Vps, Wps */
3253FNIEMOP_DEF(iemOp_orps_Vps_Wps)
3254{
3255 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3256 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3257}
3258
3259
3260/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
3261FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
3262{
3263 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3264 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3265}
3266
3267
3268/* Opcode 0xf3 0x0f 0x56 - invalid */
3269/* Opcode 0xf2 0x0f 0x56 - invalid */
3270
3271
3272/** Opcode 0x0f 0x57 - xorps Vps, Wps */
3273FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
3274{
3275 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3276 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3277}
3278
3279
3280/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
3281FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
3282{
3283 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3284 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3285}
3286
3287
3288/* Opcode 0xf3 0x0f 0x57 - invalid */
3289/* Opcode 0xf2 0x0f 0x57 - invalid */
3290
3291/** Opcode 0x0f 0x58 - addps Vps, Wps */
3292FNIEMOP_STUB(iemOp_addps_Vps_Wps);
3293/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
3294FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
3295/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
3296FNIEMOP_STUB(iemOp_addss_Vss_Wss);
3297/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
3298FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3299
3300/** Opcode 0x0f 0x59 - mulps Vps, Wps */
3301FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
3302/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
3303FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
3304/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
3305FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
3306/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
3307FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
3308
3309/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3310FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3311/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3312FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3313/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3314FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3315/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3316FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3317
3318/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3319FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3320/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3321FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3322/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3323FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3324/* Opcode 0xf2 0x0f 0x5b - invalid */
3325
3326/** Opcode 0x0f 0x5c - subps Vps, Wps */
3327FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3328/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3329FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3330/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3331FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3332/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3333FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3334
3335/** Opcode 0x0f 0x5d - minps Vps, Wps */
3336FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3337/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3338FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3339/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3340FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3341/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3342FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3343
3344/** Opcode 0x0f 0x5e - divps Vps, Wps */
3345FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3346/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3347FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3348/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3349FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3350/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3351FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3352
3353/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3354FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3355/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3356FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3357/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3358FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3359/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3360FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3361
3362/**
3363 * Common worker for MMX instructions on the form:
3364 * pxxxx mm1, mm2/mem32
3365 *
3366 * The 2nd operand is the first half of a register, which in the memory case
3367 * means a 32-bit memory access.
3368 */
3369FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
3370{
3371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3372 if (IEM_IS_MODRM_REG_MODE(bRm))
3373 {
3374 /*
3375 * Register, register.
3376 */
3377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3378 IEM_MC_BEGIN(2, 0);
3379 IEM_MC_ARG(uint64_t *, puDst, 0);
3380 IEM_MC_ARG(uint64_t const *, puSrc, 1);
3381 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3382 IEM_MC_PREPARE_FPU_USAGE();
3383 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3384 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
3385 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3386 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3387 IEM_MC_FPU_TO_MMX_MODE();
3388 IEM_MC_ADVANCE_RIP();
3389 IEM_MC_END();
3390 }
3391 else
3392 {
3393 /*
3394 * Register, memory.
3395 */
3396 IEM_MC_BEGIN(2, 2);
3397 IEM_MC_ARG(uint64_t *, puDst, 0);
3398 IEM_MC_LOCAL(uint64_t, uSrc);
3399 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
3400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3401
3402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3404 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3405 IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3406
3407 IEM_MC_PREPARE_FPU_USAGE();
3408 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3409 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3410 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3411 IEM_MC_FPU_TO_MMX_MODE();
3412
3413 IEM_MC_ADVANCE_RIP();
3414 IEM_MC_END();
3415 }
3416 return VINF_SUCCESS;
3417}
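
/*
 * Illustrative sketch only (hypothetical rendering): a "low to full" op such
 * as punpcklbw only consumes the low half of the source, which is why the
 * memory form above gets away with a zero-extending 32-bit fetch.
 */
#if 0 /* exposition, not built */
static void iemExamplePunpcklbwU64(uint64_t *puDst, uint64_t const *puSrc)
{
    RTUINT64U uDst;    uDst.u = *puDst;
    RTUINT64U uSrc;    uSrc.u = *puSrc;
    RTUINT64U uResult;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult.au8[i * 2]     = uDst.au8[i];
        uResult.au8[i * 2 + 1] = uSrc.au8[i];
    }
    *puDst = uResult.u;
}
#endif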
3418
3419
3420/**
3421 * Common worker for SSE2 instructions on the form:
3422 * pxxxx xmm1, xmm2/mem128
3423 *
3424 * The 2nd operand is the first half of a register, which in the memory case
3425 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
3426 *
3427 * Exceptions type 4.
3428 */
3429FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
3430{
3431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3432 if (IEM_IS_MODRM_REG_MODE(bRm))
3433 {
3434 /*
3435 * Register, register.
3436 */
3437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3438 IEM_MC_BEGIN(2, 0);
3439 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3440 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3441 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3442 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3443 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3444 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3445 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3446 IEM_MC_ADVANCE_RIP();
3447 IEM_MC_END();
3448 }
3449 else
3450 {
3451 /*
3452 * Register, memory.
3453 */
3454 IEM_MC_BEGIN(2, 2);
3455 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3456 IEM_MC_LOCAL(RTUINT128U, uSrc);
3457 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3459
3460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3462 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3463 /** @todo Most CPUs probably only read the low qword. We read everything to
3464 * make sure we apply segmentation and alignment checks correctly.
3465 * When we have time, it would be interesting to explore what real
3466 * CPUs actually do and whether they will do a TLB load for the high
3467 * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
3468 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3469
3470 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3471 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3472 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3473
3474 IEM_MC_ADVANCE_RIP();
3475 IEM_MC_END();
3476 }
3477 return VINF_SUCCESS;
3478}
3479
3480
3481/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3482FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3483{
3484 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3485 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
3486}
3487
3488
3489/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3490FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3491{
3492 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3493 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
3494}
3495
3496
3497/* Opcode 0xf3 0x0f 0x60 - invalid */
3498
3499
3500/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3501FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3502{
3503 /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
3504 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3505 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
3506}
3507
3508
3509/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3510FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3511{
3512 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3513 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
3514}
3515
3516
3517/* Opcode 0xf3 0x0f 0x61 - invalid */
3518
3519
3520/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3521FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3522{
3523 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3524 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
3525}
3526
3527
3528/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3529FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3530{
3531 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3532 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
3533}
3534
3535
3536/* Opcode 0xf3 0x0f 0x62 - invalid */
3537
3538
3539
3540/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3541FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
3542{
3543 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3544 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
3545}
3546
3547
3548/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3549FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
3550{
3551 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3552 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
3553}
3554
3555
3556/* Opcode 0xf3 0x0f 0x63 - invalid */
3557
3558
3559/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3560FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
3561{
3562 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3563 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
3564}
3565
3566
3567/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3568FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
3569{
3570 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3571 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
3572}
3573
3574
3575/* Opcode 0xf3 0x0f 0x64 - invalid */
3576
3577
3578/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3579FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
3580{
3581 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3582 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
3583}
3584
3585
3586/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3587FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
3588{
3589 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3590 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
3591}
3592
3593
3594/* Opcode 0xf3 0x0f 0x65 - invalid */
3595
3596
3597/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3598FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
3599{
3600 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3601 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
3602}
3603
3604
3605/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3606FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
3607{
3608 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3609 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
3610}
3611
3612
3613/* Opcode 0xf3 0x0f 0x66 - invalid */
3614
3615
3616/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3617FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
3618{
3619 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3620 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
3621}
3622
3623
3624/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3625FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
3626{
3627 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3628 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
3629}
3630
3631
3632/* Opcode 0xf3 0x0f 0x67 - invalid */
3633
3634
3635/**
3636 * Common worker for MMX instructions on the form:
3637 * pxxxx mm1, mm2/mem64
3638 *
3639 * The 2nd operand is the second half of a register, which in the memory case
3640 * means a 64-bit memory access for MMX.
3641 */
3642FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
3643{
3644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3645 if (IEM_IS_MODRM_REG_MODE(bRm))
3646 {
3647 /*
3648 * Register, register.
3649 */
3650 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3651 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3653 IEM_MC_BEGIN(2, 0);
3654 IEM_MC_ARG(uint64_t *, puDst, 0);
3655 IEM_MC_ARG(uint64_t const *, puSrc, 1);
3656 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3657 IEM_MC_PREPARE_FPU_USAGE();
3658 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3659 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
3660 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3661 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3662 IEM_MC_FPU_TO_MMX_MODE();
3663 IEM_MC_ADVANCE_RIP();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 /*
3669 * Register, memory.
3670 */
3671 IEM_MC_BEGIN(2, 2);
3672 IEM_MC_ARG(uint64_t *, puDst, 0);
3673 IEM_MC_LOCAL(uint64_t, uSrc);
3674 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
3675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3676
3677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3679 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3680 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */
3681
3682 IEM_MC_PREPARE_FPU_USAGE();
3683 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
3684 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
3685 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
3686 IEM_MC_FPU_TO_MMX_MODE();
3687
3688 IEM_MC_ADVANCE_RIP();
3689 IEM_MC_END();
3690 }
3691 return VINF_SUCCESS;
3692}
3693
3694
3695/**
3696 * Common worker for SSE2 instructions on the form:
3697 * pxxxx xmm1, xmm2/mem128
3698 *
3699 * The 2nd operand is the second half of a register, which for SSE means a
3700 * 128-bit aligned access that may read the full 128 bits or only the upper 64 bits.
3701 *
3702 * Exceptions type 4.
3703 */
3704FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
3705{
3706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3707 if (IEM_IS_MODRM_REG_MODE(bRm))
3708 {
3709 /*
3710 * Register, register.
3711 */
3712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3713 IEM_MC_BEGIN(2, 0);
3714 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3715 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3716 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3717 IEM_MC_PREPARE_SSE_USAGE();
3718 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3719 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3720 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3721 IEM_MC_ADVANCE_RIP();
3722 IEM_MC_END();
3723 }
3724 else
3725 {
3726 /*
3727 * Register, memory.
3728 */
3729 IEM_MC_BEGIN(2, 2);
3730 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3731 IEM_MC_LOCAL(RTUINT128U, uSrc);
3732 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3734
3735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3737 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3738 /** @todo Most CPUs probably only read the high qword. We read everything to
3739 * make sure we apply segmentation and alignment checks correctly.
3740 * When we have time, it would be interesting to explore what real
3741 * CPUs actually do and whether they will do a TLB load for the lower
3742 * part or skip any associated \#PF. */
3743 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3744
3745 IEM_MC_PREPARE_SSE_USAGE();
3746 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3747 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
3748
3749 IEM_MC_ADVANCE_RIP();
3750 IEM_MC_END();
3751 }
3752 return VINF_SUCCESS;
3753}
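
/*
 * Illustrative sketch only (hypothetical rendering): the simplest "high to
 * full" case is punpckhqdq, which moves the destination's high qword down and
 * takes the source's high qword as the new top half.
 */
#if 0 /* exposition, not built */
static void iemExamplePunpckhqdqU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] = puDst->au64[1];
    puDst->au64[1] = puSrc->au64[1];
}
#endif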
3754
3755
3756/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
3757 * @note Intel and AMD both use Qd for the second parameter, however they
3758 * both list it as an mmX/mem64 operand and Intel describes it as being
3759 * loaded as a qword, so it should be Qq, shouldn't it? */
3760FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
3761{
3762 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3763 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
3764}
3765
3766
3767/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3768FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3769{
3770 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3771 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
3772}
3773
3774
3775/* Opcode 0xf3 0x0f 0x68 - invalid */
3776
3777
3778/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
3779 * @note Intel and AMD both use Qd for the second parameter, however they
3780 * both list it as an mmX/mem64 operand and Intel describes it as being
3781 * loaded as a qword, so it should be Qq, shouldn't it? */
3782FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
3783{
3784 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3785 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
3786}
3787
3788
3789/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Hx, Wx */
3790FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3791{
3792 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3793 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
3794
3795}
3796
3797
3798/* Opcode 0xf3 0x0f 0x69 - invalid */
3799
3800
3801/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
3802 * @note Intel and AMD both uses Qd for the second parameter, however they
3803 * both list it as a mmX/mem64 operand and intel describes it as being
3804 * loaded as a qword, so it should be Qq, shouldn't it? */
3805FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
3806{
3807 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3808 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
3809}
3810
3811
3812/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3813FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
3814{
3815 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3816 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
3817}
3818
3819
3820/* Opcode 0xf3 0x0f 0x6a - invalid */
3821
3822
3823/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3824FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
3825{
3826 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
3827 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
3828}
3829
3830
3831/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3832FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
3833{
3834 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3835 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
3836}
3837
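/*
 * Illustrative sketch for PACKSSDW above: signed dword to signed word
 * conversion with saturation.  Hypothetical helper, not used by the
 * emulation; iemAImpl_packssdw_u64/u128 apply this per element.
 */
DECLINLINE(int16_t) iemExamplePackssdwSatI32ToI16(int32_t i32)
{
    if (i32 > INT16_MAX)
        return INT16_MAX;   /* positive overflow saturates to 0x7fff */
    if (i32 < INT16_MIN)
        return INT16_MIN;   /* negative overflow saturates to 0x8000 */
    return (int16_t)i32;
}
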

/* Opcode 0xf3 0x0f 0x6b - invalid */


/* Opcode 0x0f 0x6c - invalid */


/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}


/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */


/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}


/* Opcode 0xf3 0x0f 0x6d - invalid */


FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2 -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

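/*
 * Illustrative sketch for the movd forms above: a 32-bit source written to an
 * XMM register is zero-extended to the full 128 bits.  Hypothetical helper,
 * not used by the emulation (IEM_MC_STORE_XREG_U32_ZX_U128 does this).
 */
DECLINLINE(void) iemExampleMovdZxU128(PRTUINT128U puDst, uint32_t u32Src)
{
    puDst->s.Lo = u32Src;   /* dword lands in bits 31:0, bits 63:32 cleared */
    puDst->s.Hi = 0;        /* the high qword is cleared as well */
}
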
/* Opcode 0xf3 0x0f 0x6e - invalid */


/**
 * @opcode      0x6f
 * @oppfx       none
 * @opcpuid     mmx
 * @opgroup     og_mmx_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);

        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode      0x6f
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode      0x6f
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows the ModR/M bytes */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

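/*
 * Illustrative sketch for PSHUFW above: each destination word is selected by
 * a 2-bit field of the immediate.  Hypothetical helper, not used by the
 * emulation; iemAImpl_pshufw_u64 implements this.
 */
DECLINLINE(uint64_t) iemExamplePshufwU64(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bImm >> (iWord * 2)) & 3; /* source word index */
        uResult |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
    }
    return uResult;
}
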

/**
 * Common worker for SSE2 instructions on the forms:
 *      pshufd  xmm1, xmm2/mem128, imm8
 *      pshufhw xmm1, xmm2/mem128, imm8
 *      pshuflw xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows the ModR/M bytes */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}


/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}


/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}


/**
 * Common worker for MMX instructions of the form:
 *      psrlw mm, imm8
 *      psraw mm, imm8
 *      psllw mm, imm8
 *      psrld mm, imm8
 *      psrad mm, imm8
 *      pslld mm, imm8
 *      psrlq mm, imm8
 *      psllq mm, imm8
 */
FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory not supported.
         */
        /// @todo Caller already enforced register mode?!
    }
    return VINF_SUCCESS;
}

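/*
 * Illustrative sketch for the shift-by-immediate workers: per-word logical
 * right shift as done by PSRLW; counts above 15 clear every lane.
 * Hypothetical helper, not used by the emulation (see iemAImpl_psrlw_imm_u64).
 */
DECLINLINE(uint64_t) iemExamplePsrlwImmU64(uint64_t uSrc, uint8_t bShift)
{
    if (bShift > 15)
        return 0;                                   /* everything shifted out */
    uint64_t const fLaneMask = UINT64_C(0xffff) >> bShift;
    /* Shift all lanes at once, then mask off the bits that crossed over from
       the lane above. */
    return (uSrc >> bShift) & (fLaneMask * UINT64_C(0x0001000100010001));
}
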

/**
 * Common worker for SSE2 instructions of the form:
 *      psrlw xmm, imm8
 *      psraw xmm, imm8
 *      psllw xmm, imm8
 *      psrld xmm, imm8
 *      psrad xmm, imm8
 *      pslld xmm, imm8
 *      psrlq xmm, imm8
 *      psllq xmm, imm8
 */
FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory not supported.
         */
        /// @todo Caller already enforced register mode?!
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}


/** Opcode 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}


/** Opcode 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}


/**
 * Group 12 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);


/** Opcode 0x0f 0x71. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}


/** Opcode 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}


/** Opcode 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}


/**
 * Group 13 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);

/** Opcode 0x0f 0x72. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}


/** Opcode 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}

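/*
 * Illustrative sketch for PSLLDQ above: the whole XMM register is shifted
 * left by a byte count; counts above 15 clear the register.  Hypothetical
 * helper, not used by the emulation (see iemAImpl_pslldq_imm_u128).
 */
DECLINLINE(void) iemExamplePslldqU128(PRTUINT128U puDst, uint8_t cbShift)
{
    RTUINT128U const uSrc = *puDst;
    RT_ZERO(*puDst);
    if (cbShift <= 15)
        for (unsigned iByte = cbShift; iByte < 16; iByte++)
            puDst->au8[iByte] = uSrc.au8[iByte - cbShift];
}
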
/**
 * Group 14 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);


/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}


/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}


/* Opcode 0xf3 0x0f 0x74 - invalid */
/* Opcode 0xf2 0x0f 0x74 - invalid */


/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}


/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}


/* Opcode 0xf3 0x0f 0x75 - invalid */
/* Opcode 0xf2 0x0f 0x75 - invalid */


/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}


/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}

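/*
 * Illustrative sketch for the PCMPEQ family above: each equal lane becomes
 * all ones, each unequal lane all zeroes, shown here for byte lanes.
 * Hypothetical helper, not used by the emulation (see iemAImpl_pcmpeqb_*).
 */
DECLINLINE(uint64_t) iemExamplePcmpeqbU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        if (((uDst >> (iByte * 8)) & 0xff) == ((uSrc >> (iByte * 8)) & 0xff))
            uResult |= UINT64_C(0xff) << (iByte * 8);
    return uResult;
}
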

/* Opcode 0xf3 0x0f 0x76 - invalid */
/* Opcode 0xf2 0x0f 0x76 - invalid */


/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}

/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif

/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_STUB(iemOp_haddps_Vps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);

/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Pd
         * @optest      op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2 -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Vy
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

/**
 * @opcode      0x7e
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf2 0x0f 0x7e - invalid */


/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf2 0x0f 0x7f - invalid */

/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

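/*
 * Illustrative sketch for the Jcc decoders around here: a taken jump adds the
 * sign-extended displacement to the RIP of the next instruction, which is
 * what IEM_MC_REL_JMP_S16/S32 do.  Hypothetical helper, not used by the code.
 */
DECLINLINE(uint64_t) iemExampleJccTarget(uint64_t uRipNext, int32_t i32Disp)
{
    /* The displacement is relative to the first byte after the instruction. */
    return uRipNext + (uint64_t)(int64_t)i32Disp;
}
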

/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5793
5794 IEM_MC_BEGIN(0, 0);
5795 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5796 IEM_MC_ADVANCE_RIP();
5797 } IEM_MC_ELSE() {
5798 IEM_MC_REL_JMP_S32(i32Imm);
5799 } IEM_MC_ENDIF();
5800 IEM_MC_END();
5801 }
5802 return VINF_SUCCESS;
5803}
5804
5805
5806/** Opcode 0x0f 0x8c. */
5807FNIEMOP_DEF(iemOp_jl_Jv)
5808{
5809 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5810 IEMOP_HLP_MIN_386();
5811 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5812 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5813 {
5814 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5816
5817 IEM_MC_BEGIN(0, 0);
5818 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5819 IEM_MC_REL_JMP_S16(i16Imm);
5820 } IEM_MC_ELSE() {
5821 IEM_MC_ADVANCE_RIP();
5822 } IEM_MC_ENDIF();
5823 IEM_MC_END();
5824 }
5825 else
5826 {
5827 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5829
5830 IEM_MC_BEGIN(0, 0);
5831 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5832 IEM_MC_REL_JMP_S32(i32Imm);
5833 } IEM_MC_ELSE() {
5834 IEM_MC_ADVANCE_RIP();
5835 } IEM_MC_ENDIF();
5836 IEM_MC_END();
5837 }
5838 return VINF_SUCCESS;
5839}
5840
5841
5842/** Opcode 0x0f 0x8d. */
5843FNIEMOP_DEF(iemOp_jnl_Jv)
5844{
5845 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5846 IEMOP_HLP_MIN_386();
5847 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5848 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5849 {
5850 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5852
5853 IEM_MC_BEGIN(0, 0);
5854 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5855 IEM_MC_ADVANCE_RIP();
5856 } IEM_MC_ELSE() {
5857 IEM_MC_REL_JMP_S16(i16Imm);
5858 } IEM_MC_ENDIF();
5859 IEM_MC_END();
5860 }
5861 else
5862 {
5863 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5865
5866 IEM_MC_BEGIN(0, 0);
5867 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5868 IEM_MC_ADVANCE_RIP();
5869 } IEM_MC_ELSE() {
5870 IEM_MC_REL_JMP_S32(i32Imm);
5871 } IEM_MC_ENDIF();
5872 IEM_MC_END();
5873 }
5874 return VINF_SUCCESS;
5875}
5876
5877
5878/** Opcode 0x0f 0x8e. */
5879FNIEMOP_DEF(iemOp_jle_Jv)
5880{
5881 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5882 IEMOP_HLP_MIN_386();
5883 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5884 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5885 {
5886 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5888
5889 IEM_MC_BEGIN(0, 0);
5890 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5891 IEM_MC_REL_JMP_S16(i16Imm);
5892 } IEM_MC_ELSE() {
5893 IEM_MC_ADVANCE_RIP();
5894 } IEM_MC_ENDIF();
5895 IEM_MC_END();
5896 }
5897 else
5898 {
5899 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5901
5902 IEM_MC_BEGIN(0, 0);
5903 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5904 IEM_MC_REL_JMP_S32(i32Imm);
5905 } IEM_MC_ELSE() {
5906 IEM_MC_ADVANCE_RIP();
5907 } IEM_MC_ENDIF();
5908 IEM_MC_END();
5909 }
5910 return VINF_SUCCESS;
5911}
5912
5913
5914/** Opcode 0x0f 0x8f. */
5915FNIEMOP_DEF(iemOp_jnle_Jv)
5916{
5917 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5918 IEMOP_HLP_MIN_386();
5919 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5920 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5921 {
5922 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5924
5925 IEM_MC_BEGIN(0, 0);
5926 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5927 IEM_MC_ADVANCE_RIP();
5928 } IEM_MC_ELSE() {
5929 IEM_MC_REL_JMP_S16(i16Imm);
5930 } IEM_MC_ENDIF();
5931 IEM_MC_END();
5932 }
5933 else
5934 {
5935 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5937
5938 IEM_MC_BEGIN(0, 0);
5939 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5940 IEM_MC_ADVANCE_RIP();
5941 } IEM_MC_ELSE() {
5942 IEM_MC_REL_JMP_S32(i32Imm);
5943 } IEM_MC_ENDIF();
5944 IEM_MC_END();
5945 }
5946 return VINF_SUCCESS;
5947}
5948
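/*
 * All the Jcc workers above differ only in the EFLAGS predicate handed to
 * IEM_MC_IF_EFL_*. As a plain C/C++ illustration, the three predicate
 * shapes involved look like this (the iemExample* helpers are hypothetical
 * and not called by the decoder; only the X86_EFL_* masks already used in
 * this file are assumed):
 */
static bool iemExampleCondBe(uint32_t fEfl) /* jbe/jna: CF=1 or ZF=1 */
{
    return RT_BOOL(fEfl & (X86_EFL_CF | X86_EFL_ZF));
}

static bool iemExampleCondL(uint32_t fEfl)  /* jl/jnge: SF != OF */
{
    return RT_BOOL(fEfl & X86_EFL_SF) != RT_BOOL(fEfl & X86_EFL_OF);
}

static bool iemExampleCondLe(uint32_t fEfl) /* jle/jng: ZF=1, or SF != OF */
{
    return RT_BOOL(fEfl & X86_EFL_ZF) || iemExampleCondL(fEfl);
}
/* The complementary forms (ja, jge, jg, ...) take the branch when these
   return false, as the swapped IEM_MC_REL_JMP/IEM_MC_ADVANCE_RIP arms
   above show. */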
5949
5950/** Opcode 0x0f 0x90. */
5951FNIEMOP_DEF(iemOp_seto_Eb)
5952{
5953 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5954 IEMOP_HLP_MIN_386();
5955 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5956
5957 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5958 * any way. AMD says it's "unused", whatever that means. We're
5959 * ignoring it for now. */
5960 if (IEM_IS_MODRM_REG_MODE(bRm))
5961 {
5962 /* register target */
5963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5964 IEM_MC_BEGIN(0, 0);
5965 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5966 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
5967 } IEM_MC_ELSE() {
5968 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
5969 } IEM_MC_ENDIF();
5970 IEM_MC_ADVANCE_RIP();
5971 IEM_MC_END();
5972 }
5973 else
5974 {
5975 /* memory target */
5976 IEM_MC_BEGIN(0, 1);
5977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5980 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5981 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5982 } IEM_MC_ELSE() {
5983 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5984 } IEM_MC_ENDIF();
5985 IEM_MC_ADVANCE_RIP();
5986 IEM_MC_END();
5987 }
5988 return VINF_SUCCESS;
5989}
5990
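/*
 * The fifteen SETcc workers that follow repeat the shape above verbatim:
 * evaluate the same EFLAGS predicate as the matching Jcc, then store 1 or 0
 * into the byte operand, register and memory targets alike. Reduced to
 * plain C (iemExampleSetcc is a hypothetical helper, not used by the
 * decoder):
 */
static void iemExampleSetcc(uint8_t *pbDst, bool fCond)
{
    *pbDst = fCond ? 1 : 0; /* SETcc always writes the byte; it never skips the store */
}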
5991
5992/** Opcode 0x0f 0x91. */
5993FNIEMOP_DEF(iemOp_setno_Eb)
5994{
5995 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5996 IEMOP_HLP_MIN_386();
5997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5998
5999 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6000 * any way. AMD says it's "unused", whatever that means. We're
6001 * ignoring it for now. */
6002 if (IEM_IS_MODRM_REG_MODE(bRm))
6003 {
6004 /* register target */
6005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6006 IEM_MC_BEGIN(0, 0);
6007 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6008 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6009 } IEM_MC_ELSE() {
6010 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6011 } IEM_MC_ENDIF();
6012 IEM_MC_ADVANCE_RIP();
6013 IEM_MC_END();
6014 }
6015 else
6016 {
6017 /* memory target */
6018 IEM_MC_BEGIN(0, 1);
6019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6022 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6023 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6024 } IEM_MC_ELSE() {
6025 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6026 } IEM_MC_ENDIF();
6027 IEM_MC_ADVANCE_RIP();
6028 IEM_MC_END();
6029 }
6030 return VINF_SUCCESS;
6031}
6032
6033
6034/** Opcode 0x0f 0x92. */
6035FNIEMOP_DEF(iemOp_setc_Eb)
6036{
6037 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
6038 IEMOP_HLP_MIN_386();
6039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6040
6041 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6042 * any way. AMD says it's "unused", whatever that means. We're
6043 * ignoring it for now. */
6044 if (IEM_IS_MODRM_REG_MODE(bRm))
6045 {
6046 /* register target */
6047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6048 IEM_MC_BEGIN(0, 0);
6049 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6050 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6051 } IEM_MC_ELSE() {
6052 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6053 } IEM_MC_ENDIF();
6054 IEM_MC_ADVANCE_RIP();
6055 IEM_MC_END();
6056 }
6057 else
6058 {
6059 /* memory target */
6060 IEM_MC_BEGIN(0, 1);
6061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6064 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6065 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6066 } IEM_MC_ELSE() {
6067 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6068 } IEM_MC_ENDIF();
6069 IEM_MC_ADVANCE_RIP();
6070 IEM_MC_END();
6071 }
6072 return VINF_SUCCESS;
6073}
6074
6075
6076/** Opcode 0x0f 0x93. */
6077FNIEMOP_DEF(iemOp_setnc_Eb)
6078{
6079 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
6080 IEMOP_HLP_MIN_386();
6081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6082
6083 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6084 * any way. AMD says it's "unused", whatever that means. We're
6085 * ignoring it for now. */
6086 if (IEM_IS_MODRM_REG_MODE(bRm))
6087 {
6088 /* register target */
6089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6090 IEM_MC_BEGIN(0, 0);
6091 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6092 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6093 } IEM_MC_ELSE() {
6094 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6095 } IEM_MC_ENDIF();
6096 IEM_MC_ADVANCE_RIP();
6097 IEM_MC_END();
6098 }
6099 else
6100 {
6101 /* memory target */
6102 IEM_MC_BEGIN(0, 1);
6103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6106 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6107 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6108 } IEM_MC_ELSE() {
6109 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6110 } IEM_MC_ENDIF();
6111 IEM_MC_ADVANCE_RIP();
6112 IEM_MC_END();
6113 }
6114 return VINF_SUCCESS;
6115}
6116
6117
6118/** Opcode 0x0f 0x94. */
6119FNIEMOP_DEF(iemOp_sete_Eb)
6120{
6121 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
6122 IEMOP_HLP_MIN_386();
6123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6124
6125 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6126 * any way. AMD says it's "unused", whatever that means. We're
6127 * ignoring it for now. */
6128 if (IEM_IS_MODRM_REG_MODE(bRm))
6129 {
6130 /* register target */
6131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6132 IEM_MC_BEGIN(0, 0);
6133 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6134 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6135 } IEM_MC_ELSE() {
6136 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6137 } IEM_MC_ENDIF();
6138 IEM_MC_ADVANCE_RIP();
6139 IEM_MC_END();
6140 }
6141 else
6142 {
6143 /* memory target */
6144 IEM_MC_BEGIN(0, 1);
6145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6148 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6149 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6150 } IEM_MC_ELSE() {
6151 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6152 } IEM_MC_ENDIF();
6153 IEM_MC_ADVANCE_RIP();
6154 IEM_MC_END();
6155 }
6156 return VINF_SUCCESS;
6157}
6158
6159
6160/** Opcode 0x0f 0x95. */
6161FNIEMOP_DEF(iemOp_setne_Eb)
6162{
6163 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
6164 IEMOP_HLP_MIN_386();
6165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6166
6167 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6168 * any way. AMD says it's "unused", whatever that means. We're
6169 * ignoring it for now. */
6170 if (IEM_IS_MODRM_REG_MODE(bRm))
6171 {
6172 /* register target */
6173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6174 IEM_MC_BEGIN(0, 0);
6175 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6176 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6177 } IEM_MC_ELSE() {
6178 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6179 } IEM_MC_ENDIF();
6180 IEM_MC_ADVANCE_RIP();
6181 IEM_MC_END();
6182 }
6183 else
6184 {
6185 /* memory target */
6186 IEM_MC_BEGIN(0, 1);
6187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6190 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6191 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6192 } IEM_MC_ELSE() {
6193 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6194 } IEM_MC_ENDIF();
6195 IEM_MC_ADVANCE_RIP();
6196 IEM_MC_END();
6197 }
6198 return VINF_SUCCESS;
6199}
6200
6201
6202/** Opcode 0x0f 0x96. */
6203FNIEMOP_DEF(iemOp_setbe_Eb)
6204{
6205 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
6206 IEMOP_HLP_MIN_386();
6207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6208
6209 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6210 * any way. AMD says it's "unused", whatever that means. We're
6211 * ignoring it for now. */
6212 if (IEM_IS_MODRM_REG_MODE(bRm))
6213 {
6214 /* register target */
6215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6216 IEM_MC_BEGIN(0, 0);
6217 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6218 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6219 } IEM_MC_ELSE() {
6220 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6221 } IEM_MC_ENDIF();
6222 IEM_MC_ADVANCE_RIP();
6223 IEM_MC_END();
6224 }
6225 else
6226 {
6227 /* memory target */
6228 IEM_MC_BEGIN(0, 1);
6229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6232 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6233 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6234 } IEM_MC_ELSE() {
6235 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6236 } IEM_MC_ENDIF();
6237 IEM_MC_ADVANCE_RIP();
6238 IEM_MC_END();
6239 }
6240 return VINF_SUCCESS;
6241}
6242
6243
6244/** Opcode 0x0f 0x97. */
6245FNIEMOP_DEF(iemOp_setnbe_Eb)
6246{
6247 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
6248 IEMOP_HLP_MIN_386();
6249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6250
6251 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6252 * any way. AMD says it's "unused", whatever that means. We're
6253 * ignoring it for now. */
6254 if (IEM_IS_MODRM_REG_MODE(bRm))
6255 {
6256 /* register target */
6257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6258 IEM_MC_BEGIN(0, 0);
6259 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6260 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6261 } IEM_MC_ELSE() {
6262 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6263 } IEM_MC_ENDIF();
6264 IEM_MC_ADVANCE_RIP();
6265 IEM_MC_END();
6266 }
6267 else
6268 {
6269 /* memory target */
6270 IEM_MC_BEGIN(0, 1);
6271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6274 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6275 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6276 } IEM_MC_ELSE() {
6277 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6278 } IEM_MC_ENDIF();
6279 IEM_MC_ADVANCE_RIP();
6280 IEM_MC_END();
6281 }
6282 return VINF_SUCCESS;
6283}
6284
6285
6286/** Opcode 0x0f 0x98. */
6287FNIEMOP_DEF(iemOp_sets_Eb)
6288{
6289 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
6290 IEMOP_HLP_MIN_386();
6291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6292
6293 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6294 * any way. AMD says it's "unused", whatever that means. We're
6295 * ignoring it for now. */
6296 if (IEM_IS_MODRM_REG_MODE(bRm))
6297 {
6298 /* register target */
6299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6300 IEM_MC_BEGIN(0, 0);
6301 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6302 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6303 } IEM_MC_ELSE() {
6304 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6305 } IEM_MC_ENDIF();
6306 IEM_MC_ADVANCE_RIP();
6307 IEM_MC_END();
6308 }
6309 else
6310 {
6311 /* memory target */
6312 IEM_MC_BEGIN(0, 1);
6313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6316 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6317 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6318 } IEM_MC_ELSE() {
6319 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6320 } IEM_MC_ENDIF();
6321 IEM_MC_ADVANCE_RIP();
6322 IEM_MC_END();
6323 }
6324 return VINF_SUCCESS;
6325}
6326
6327
6328/** Opcode 0x0f 0x99. */
6329FNIEMOP_DEF(iemOp_setns_Eb)
6330{
6331 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
6332 IEMOP_HLP_MIN_386();
6333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6334
6335 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6336 * any way. AMD says it's "unused", whatever that means. We're
6337 * ignoring it for now. */
6338 if (IEM_IS_MODRM_REG_MODE(bRm))
6339 {
6340 /* register target */
6341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6342 IEM_MC_BEGIN(0, 0);
6343 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6344 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6345 } IEM_MC_ELSE() {
6346 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6347 } IEM_MC_ENDIF();
6348 IEM_MC_ADVANCE_RIP();
6349 IEM_MC_END();
6350 }
6351 else
6352 {
6353 /* memory target */
6354 IEM_MC_BEGIN(0, 1);
6355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6358 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6359 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6360 } IEM_MC_ELSE() {
6361 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6362 } IEM_MC_ENDIF();
6363 IEM_MC_ADVANCE_RIP();
6364 IEM_MC_END();
6365 }
6366 return VINF_SUCCESS;
6367}
6368
6369
6370/** Opcode 0x0f 0x9a. */
6371FNIEMOP_DEF(iemOp_setp_Eb)
6372{
6373 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
6374 IEMOP_HLP_MIN_386();
6375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6376
6377 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6378 * any way. AMD says it's "unused", whatever that means. We're
6379 * ignoring it for now. */
6380 if (IEM_IS_MODRM_REG_MODE(bRm))
6381 {
6382 /* register target */
6383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384 IEM_MC_BEGIN(0, 0);
6385 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6386 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6387 } IEM_MC_ELSE() {
6388 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6389 } IEM_MC_ENDIF();
6390 IEM_MC_ADVANCE_RIP();
6391 IEM_MC_END();
6392 }
6393 else
6394 {
6395 /* memory target */
6396 IEM_MC_BEGIN(0, 1);
6397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6400 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6401 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6402 } IEM_MC_ELSE() {
6403 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6404 } IEM_MC_ENDIF();
6405 IEM_MC_ADVANCE_RIP();
6406 IEM_MC_END();
6407 }
6408 return VINF_SUCCESS;
6409}
6410
6411
6412/** Opcode 0x0f 0x9b. */
6413FNIEMOP_DEF(iemOp_setnp_Eb)
6414{
6415 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
6416 IEMOP_HLP_MIN_386();
6417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6418
6419 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6420 * any way. AMD says it's "unused", whatever that means. We're
6421 * ignoring it for now. */
6422 if (IEM_IS_MODRM_REG_MODE(bRm))
6423 {
6424 /* register target */
6425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6426 IEM_MC_BEGIN(0, 0);
6427 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6428 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6429 } IEM_MC_ELSE() {
6430 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6431 } IEM_MC_ENDIF();
6432 IEM_MC_ADVANCE_RIP();
6433 IEM_MC_END();
6434 }
6435 else
6436 {
6437 /* memory target */
6438 IEM_MC_BEGIN(0, 1);
6439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6442 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6443 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6444 } IEM_MC_ELSE() {
6445 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6446 } IEM_MC_ENDIF();
6447 IEM_MC_ADVANCE_RIP();
6448 IEM_MC_END();
6449 }
6450 return VINF_SUCCESS;
6451}
6452
6453
6454/** Opcode 0x0f 0x9c. */
6455FNIEMOP_DEF(iemOp_setl_Eb)
6456{
6457 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
6458 IEMOP_HLP_MIN_386();
6459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6460
6461 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6462 * any way. AMD says it's "unused", whatever that means. We're
6463 * ignoring it for now. */
6464 if (IEM_IS_MODRM_REG_MODE(bRm))
6465 {
6466 /* register target */
6467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6468 IEM_MC_BEGIN(0, 0);
6469 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6470 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6471 } IEM_MC_ELSE() {
6472 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6473 } IEM_MC_ENDIF();
6474 IEM_MC_ADVANCE_RIP();
6475 IEM_MC_END();
6476 }
6477 else
6478 {
6479 /* memory target */
6480 IEM_MC_BEGIN(0, 1);
6481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6484 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6485 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6486 } IEM_MC_ELSE() {
6487 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6488 } IEM_MC_ENDIF();
6489 IEM_MC_ADVANCE_RIP();
6490 IEM_MC_END();
6491 }
6492 return VINF_SUCCESS;
6493}
6494
6495
6496/** Opcode 0x0f 0x9d. */
6497FNIEMOP_DEF(iemOp_setnl_Eb)
6498{
6499 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6500 IEMOP_HLP_MIN_386();
6501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6502
6503 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6504 * any way. AMD says it's "unused", whatever that means. We're
6505 * ignoring it for now. */
6506 if (IEM_IS_MODRM_REG_MODE(bRm))
6507 {
6508 /* register target */
6509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6510 IEM_MC_BEGIN(0, 0);
6511 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6512 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6513 } IEM_MC_ELSE() {
6514 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6515 } IEM_MC_ENDIF();
6516 IEM_MC_ADVANCE_RIP();
6517 IEM_MC_END();
6518 }
6519 else
6520 {
6521 /* memory target */
6522 IEM_MC_BEGIN(0, 1);
6523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6526 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6527 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6528 } IEM_MC_ELSE() {
6529 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6530 } IEM_MC_ENDIF();
6531 IEM_MC_ADVANCE_RIP();
6532 IEM_MC_END();
6533 }
6534 return VINF_SUCCESS;
6535}
6536
6537
6538/** Opcode 0x0f 0x9e. */
6539FNIEMOP_DEF(iemOp_setle_Eb)
6540{
6541 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6542 IEMOP_HLP_MIN_386();
6543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6544
6545 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6546 * any way. AMD says it's "unused", whatever that means. We're
6547 * ignoring it for now. */
6548 if (IEM_IS_MODRM_REG_MODE(bRm))
6549 {
6550 /* register target */
6551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6552 IEM_MC_BEGIN(0, 0);
6553 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6554 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6555 } IEM_MC_ELSE() {
6556 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6557 } IEM_MC_ENDIF();
6558 IEM_MC_ADVANCE_RIP();
6559 IEM_MC_END();
6560 }
6561 else
6562 {
6563 /* memory target */
6564 IEM_MC_BEGIN(0, 1);
6565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6568 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6569 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6570 } IEM_MC_ELSE() {
6571 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6572 } IEM_MC_ENDIF();
6573 IEM_MC_ADVANCE_RIP();
6574 IEM_MC_END();
6575 }
6576 return VINF_SUCCESS;
6577}
6578
6579
6580/** Opcode 0x0f 0x9f. */
6581FNIEMOP_DEF(iemOp_setnle_Eb)
6582{
6583 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6584 IEMOP_HLP_MIN_386();
6585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6586
6587 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6588 * any way. AMD says it's "unused", whatever that means. We're
6589 * ignoring it for now. */
6590 if (IEM_IS_MODRM_REG_MODE(bRm))
6591 {
6592 /* register target */
6593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6594 IEM_MC_BEGIN(0, 0);
6595 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6596 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6597 } IEM_MC_ELSE() {
6598 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6599 } IEM_MC_ENDIF();
6600 IEM_MC_ADVANCE_RIP();
6601 IEM_MC_END();
6602 }
6603 else
6604 {
6605 /* memory target */
6606 IEM_MC_BEGIN(0, 1);
6607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6610 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6611 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6612 } IEM_MC_ELSE() {
6613 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6614 } IEM_MC_ENDIF();
6615 IEM_MC_ADVANCE_RIP();
6616 IEM_MC_END();
6617 }
6618 return VINF_SUCCESS;
6619}
6620
6621
6622/**
6623 * Common 'push segment-register' helper.
6624 */
6625FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6626{
6627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6628 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
6629 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6630
6631 switch (pVCpu->iem.s.enmEffOpSize)
6632 {
6633 case IEMMODE_16BIT:
6634 IEM_MC_BEGIN(0, 1);
6635 IEM_MC_LOCAL(uint16_t, u16Value);
6636 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6637 IEM_MC_PUSH_U16(u16Value);
6638 IEM_MC_ADVANCE_RIP();
6639 IEM_MC_END();
6640 break;
6641
6642 case IEMMODE_32BIT:
6643 IEM_MC_BEGIN(0, 1);
6644 IEM_MC_LOCAL(uint32_t, u32Value);
6645 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6646 IEM_MC_PUSH_U32_SREG(u32Value);
6647 IEM_MC_ADVANCE_RIP();
6648 IEM_MC_END();
6649 break;
6650
6651 case IEMMODE_64BIT:
6652 IEM_MC_BEGIN(0, 1);
6653 IEM_MC_LOCAL(uint64_t, u64Value);
6654 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6655 IEM_MC_PUSH_U64(u64Value);
6656 IEM_MC_ADVANCE_RIP();
6657 IEM_MC_END();
6658 break;
6659 }
6660
6661 return VINF_SUCCESS;
6662}
6663
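/*
 * Note on IEM_MC_PUSH_U32_SREG above: a 32-bit push of a segment register
 * gets a dedicated microcode op because recent Intel CPUs write only the
 * low 16 bits of the 32-bit stack slot and leave the upper half untouched.
 * Sketched over a raw little-endian view of the slot (iemExample* is a
 * hypothetical helper; the real behaviour lives behind the microcode op):
 */
static void iemExamplePushSRegU32(uint8_t *pbStackSlot, uint16_t uSel)
{
    pbStackSlot[0] = (uint8_t)(uSel & 0xff); /* selector, low byte */
    pbStackSlot[1] = (uint8_t)(uSel >> 8);   /* selector, high byte */
    /* bytes 2 and 3 of the slot deliberately keep whatever they held before */
}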
6664
6665/** Opcode 0x0f 0xa0. */
6666FNIEMOP_DEF(iemOp_push_fs)
6667{
6668 IEMOP_MNEMONIC(push_fs, "push fs");
6669 IEMOP_HLP_MIN_386();
6670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6671 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6672}
6673
6674
6675/** Opcode 0x0f 0xa1. */
6676FNIEMOP_DEF(iemOp_pop_fs)
6677{
6678 IEMOP_MNEMONIC(pop_fs, "pop fs");
6679 IEMOP_HLP_MIN_386();
6680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6681 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6682}
6683
6684
6685/** Opcode 0x0f 0xa2. */
6686FNIEMOP_DEF(iemOp_cpuid)
6687{
6688 IEMOP_MNEMONIC(cpuid, "cpuid");
6689 IEMOP_HLP_MIN_486(); /* not all 486s have CPUID. */
6690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6691 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6692}
6693
6694
6695/**
6696 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6697 * iemOp_bts_Ev_Gv.
6698 */
6699FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6700{
6701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6702 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6703
6704 if (IEM_IS_MODRM_REG_MODE(bRm))
6705 {
6706 /* register destination. */
6707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6708 switch (pVCpu->iem.s.enmEffOpSize)
6709 {
6710 case IEMMODE_16BIT:
6711 IEM_MC_BEGIN(3, 0);
6712 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6713 IEM_MC_ARG(uint16_t, u16Src, 1);
6714 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6715
6716 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6717 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6718 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6719 IEM_MC_REF_EFLAGS(pEFlags);
6720 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6721
6722 IEM_MC_ADVANCE_RIP();
6723 IEM_MC_END();
6724 return VINF_SUCCESS;
6725
6726 case IEMMODE_32BIT:
6727 IEM_MC_BEGIN(3, 0);
6728 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6729 IEM_MC_ARG(uint32_t, u32Src, 1);
6730 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6731
6732 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6733 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6734 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6735 IEM_MC_REF_EFLAGS(pEFlags);
6736 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6737
6738 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6739 IEM_MC_ADVANCE_RIP();
6740 IEM_MC_END();
6741 return VINF_SUCCESS;
6742
6743 case IEMMODE_64BIT:
6744 IEM_MC_BEGIN(3, 0);
6745 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6746 IEM_MC_ARG(uint64_t, u64Src, 1);
6747 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6748
6749 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6750 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6751 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6752 IEM_MC_REF_EFLAGS(pEFlags);
6753 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6754
6755 IEM_MC_ADVANCE_RIP();
6756 IEM_MC_END();
6757 return VINF_SUCCESS;
6758
6759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6760 }
6761 }
6762 else
6763 {
6764 /* memory destination. */
6765
6766 uint32_t fAccess;
6767 if (pImpl->pfnLockedU16)
6768 fAccess = IEM_ACCESS_DATA_RW;
6769 else /* BT */
6770 fAccess = IEM_ACCESS_DATA_R;
6771
6772 /** @todo test negative bit offsets! */
6773 switch (pVCpu->iem.s.enmEffOpSize)
6774 {
6775 case IEMMODE_16BIT:
6776 IEM_MC_BEGIN(3, 2);
6777 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6778 IEM_MC_ARG(uint16_t, u16Src, 1);
6779 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6781 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6782
6783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6784 if (pImpl->pfnLockedU16)
6785 IEMOP_HLP_DONE_DECODING();
6786 else
6787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6788 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6789 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6790 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6791 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6792 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6793 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6794 IEM_MC_FETCH_EFLAGS(EFlags);
6795
6796 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6797 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6798 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6799 else
6800 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6801 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6802
6803 IEM_MC_COMMIT_EFLAGS(EFlags);
6804 IEM_MC_ADVANCE_RIP();
6805 IEM_MC_END();
6806 return VINF_SUCCESS;
6807
6808 case IEMMODE_32BIT:
6809 IEM_MC_BEGIN(3, 2);
6810 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6811 IEM_MC_ARG(uint32_t, u32Src, 1);
6812 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6814 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6815
6816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6817 if (pImpl->pfnLockedU16)
6818 IEMOP_HLP_DONE_DECODING();
6819 else
6820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6821 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6822 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6823 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6824 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6825 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6826 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6827 IEM_MC_FETCH_EFLAGS(EFlags);
6828
6829 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6830 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6831 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6832 else
6833 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6834 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6835
6836 IEM_MC_COMMIT_EFLAGS(EFlags);
6837 IEM_MC_ADVANCE_RIP();
6838 IEM_MC_END();
6839 return VINF_SUCCESS;
6840
6841 case IEMMODE_64BIT:
6842 IEM_MC_BEGIN(3, 2);
6843 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6844 IEM_MC_ARG(uint64_t, u64Src, 1);
6845 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6847 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6848
6849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6850 if (pImpl->pfnLockedU16)
6851 IEMOP_HLP_DONE_DECODING();
6852 else
6853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6854 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6855 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6856 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6857 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6858 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6859 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6860 IEM_MC_FETCH_EFLAGS(EFlags);
6861
6862 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6863 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6864 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6865 else
6866 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6867 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6868
6869 IEM_MC_COMMIT_EFLAGS(EFlags);
6870 IEM_MC_ADVANCE_RIP();
6871 IEM_MC_END();
6872 return VINF_SUCCESS;
6873
6874 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6875 }
6876 }
6877}
6878
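/*
 * The memory path above implements the "bit string" addressing of
 * BT/BTS/BTR/BTC with a register bit offset: the offset is signed and may
 * reach far outside the addressed word, so it is split into a signed,
 * operand-sized byte displacement (the SAR + SHL pair) and an in-word bit
 * index (the AND). The 16-bit math as plain C (iemExampleBtCalc16 is a
 * hypothetical helper; an arithmetic right shift for signed values is
 * assumed, which is what the IEM_MC_SAR_* op guarantees):
 */
static void iemExampleBtCalc16(int16_t i16BitOffset, int16_t *pi16ByteDisp, uint16_t *puBitNo)
{
    *puBitNo      = (uint16_t)i16BitOffset & 0x0f;      /* bit inside the word */
    *pi16ByteDisp = (int16_t)((i16BitOffset >> 4) * 2); /* signed word index * sizeof(uint16_t) */
}
/* E.g. a bit offset of -1 selects bit 15 of the word two bytes below the
   effective address, exactly the case the @todo above wants tested. */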
6879
6880/** Opcode 0x0f 0xa3. */
6881FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6882{
6883 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6884 IEMOP_HLP_MIN_386();
6885 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6886}
6887
6888
6889/**
6890 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6891 */
6892FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6893{
6894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6895 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6896
6897 if (IEM_IS_MODRM_REG_MODE(bRm))
6898 {
6899 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6901
6902 switch (pVCpu->iem.s.enmEffOpSize)
6903 {
6904 case IEMMODE_16BIT:
6905 IEM_MC_BEGIN(4, 0);
6906 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6907 IEM_MC_ARG(uint16_t, u16Src, 1);
6908 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6909 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6910
6911 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6912 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6913 IEM_MC_REF_EFLAGS(pEFlags);
6914 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6915
6916 IEM_MC_ADVANCE_RIP();
6917 IEM_MC_END();
6918 return VINF_SUCCESS;
6919
6920 case IEMMODE_32BIT:
6921 IEM_MC_BEGIN(4, 0);
6922 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6923 IEM_MC_ARG(uint32_t, u32Src, 1);
6924 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6925 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6926
6927 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6928 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6929 IEM_MC_REF_EFLAGS(pEFlags);
6930 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6931
6932 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6933 IEM_MC_ADVANCE_RIP();
6934 IEM_MC_END();
6935 return VINF_SUCCESS;
6936
6937 case IEMMODE_64BIT:
6938 IEM_MC_BEGIN(4, 0);
6939 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6940 IEM_MC_ARG(uint64_t, u64Src, 1);
6941 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6942 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6943
6944 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6945 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6946 IEM_MC_REF_EFLAGS(pEFlags);
6947 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6948
6949 IEM_MC_ADVANCE_RIP();
6950 IEM_MC_END();
6951 return VINF_SUCCESS;
6952
6953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6954 }
6955 }
6956 else
6957 {
6958 switch (pVCpu->iem.s.enmEffOpSize)
6959 {
6960 case IEMMODE_16BIT:
6961 IEM_MC_BEGIN(4, 2);
6962 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6963 IEM_MC_ARG(uint16_t, u16Src, 1);
6964 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6965 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6967
6968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6969 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6970 IEM_MC_ASSIGN(cShiftArg, cShift);
6971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6972 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6973 IEM_MC_FETCH_EFLAGS(EFlags);
6974 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6975 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6976
6977 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6978 IEM_MC_COMMIT_EFLAGS(EFlags);
6979 IEM_MC_ADVANCE_RIP();
6980 IEM_MC_END();
6981 return VINF_SUCCESS;
6982
6983 case IEMMODE_32BIT:
6984 IEM_MC_BEGIN(4, 2);
6985 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6986 IEM_MC_ARG(uint32_t, u32Src, 1);
6987 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6988 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6990
6991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6992 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6993 IEM_MC_ASSIGN(cShiftArg, cShift);
6994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6995 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
6996 IEM_MC_FETCH_EFLAGS(EFlags);
6997 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6998 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6999
7000 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7001 IEM_MC_COMMIT_EFLAGS(EFlags);
7002 IEM_MC_ADVANCE_RIP();
7003 IEM_MC_END();
7004 return VINF_SUCCESS;
7005
7006 case IEMMODE_64BIT:
7007 IEM_MC_BEGIN(4, 2);
7008 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7009 IEM_MC_ARG(uint64_t, u64Src, 1);
7010 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7011 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7013
7014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7015 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7016 IEM_MC_ASSIGN(cShiftArg, cShift);
7017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7018 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7019 IEM_MC_FETCH_EFLAGS(EFlags);
7020 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7021 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7022
7023 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7024 IEM_MC_COMMIT_EFLAGS(EFlags);
7025 IEM_MC_ADVANCE_RIP();
7026 IEM_MC_END();
7027 return VINF_SUCCESS;
7028
7029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7030 }
7031 }
7032}
7033
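/*
 * Two details of the Ib worker above are easy to miss. First, the memory
 * path passes cbImm=1 to IEM_MC_CALC_RM_EFF_ADDR because the immediate byte
 * still follows the ModR/M bytes, which matters for RIP-relative
 * addressing. Second, the double shift itself is plain bit plumbing once
 * the count is in range; a 32-bit sketch for counts 1..31 (iemExample*
 * helpers are hypothetical; count 0, oversized counts and all flag handling
 * are left to the real pfnNormalU* workers):
 */
static uint32_t iemExampleShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    return (uDst << cShift) | (uSrc >> (32 - cShift)); /* src bits enter from the right */
}

static uint32_t iemExampleShrd32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    return (uDst >> cShift) | (uSrc << (32 - cShift)); /* src bits enter from the left */
}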
7034
7035/**
7036 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
7037 */
7038FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
7039{
7040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7041 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7042
7043 if (IEM_IS_MODRM_REG_MODE(bRm))
7044 {
7045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7046
7047 switch (pVCpu->iem.s.enmEffOpSize)
7048 {
7049 case IEMMODE_16BIT:
7050 IEM_MC_BEGIN(4, 0);
7051 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7052 IEM_MC_ARG(uint16_t, u16Src, 1);
7053 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7054 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7055
7056 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7057 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7058 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7059 IEM_MC_REF_EFLAGS(pEFlags);
7060 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7061
7062 IEM_MC_ADVANCE_RIP();
7063 IEM_MC_END();
7064 return VINF_SUCCESS;
7065
7066 case IEMMODE_32BIT:
7067 IEM_MC_BEGIN(4, 0);
7068 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7069 IEM_MC_ARG(uint32_t, u32Src, 1);
7070 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7071 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7072
7073 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7074 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7075 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7076 IEM_MC_REF_EFLAGS(pEFlags);
7077 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7078
7079 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7080 IEM_MC_ADVANCE_RIP();
7081 IEM_MC_END();
7082 return VINF_SUCCESS;
7083
7084 case IEMMODE_64BIT:
7085 IEM_MC_BEGIN(4, 0);
7086 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7087 IEM_MC_ARG(uint64_t, u64Src, 1);
7088 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7089 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7090
7091 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7092 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7093 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7094 IEM_MC_REF_EFLAGS(pEFlags);
7095 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7096
7097 IEM_MC_ADVANCE_RIP();
7098 IEM_MC_END();
7099 return VINF_SUCCESS;
7100
7101 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7102 }
7103 }
7104 else
7105 {
7106 switch (pVCpu->iem.s.enmEffOpSize)
7107 {
7108 case IEMMODE_16BIT:
7109 IEM_MC_BEGIN(4, 2);
7110 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7111 IEM_MC_ARG(uint16_t, u16Src, 1);
7112 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7113 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7115
7116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7118 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7119 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7120 IEM_MC_FETCH_EFLAGS(EFlags);
7121 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7122 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7123
7124 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7125 IEM_MC_COMMIT_EFLAGS(EFlags);
7126 IEM_MC_ADVANCE_RIP();
7127 IEM_MC_END();
7128 return VINF_SUCCESS;
7129
7130 case IEMMODE_32BIT:
7131 IEM_MC_BEGIN(4, 2);
7132 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7133 IEM_MC_ARG(uint32_t, u32Src, 1);
7134 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7135 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7137
7138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7140 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7141 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7142 IEM_MC_FETCH_EFLAGS(EFlags);
7143 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7144 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7145
7146 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7147 IEM_MC_COMMIT_EFLAGS(EFlags);
7148 IEM_MC_ADVANCE_RIP();
7149 IEM_MC_END();
7150 return VINF_SUCCESS;
7151
7152 case IEMMODE_64BIT:
7153 IEM_MC_BEGIN(4, 2);
7154 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7155 IEM_MC_ARG(uint64_t, u64Src, 1);
7156 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7157 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7159
7160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7162 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7163 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7164 IEM_MC_FETCH_EFLAGS(EFlags);
7165 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7166 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7167
7168 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7169 IEM_MC_COMMIT_EFLAGS(EFlags);
7170 IEM_MC_ADVANCE_RIP();
7171 IEM_MC_END();
7172 return VINF_SUCCESS;
7173
7174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7175 }
7176 }
7177}
7178
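/*
 * The CL variant above deliberately passes the raw CL value on; reducing
 * the count modulo the operand width is the hardware behaviour (mod 32, or
 * mod 64 for 64-bit operands) and is left to the shift workers. As a
 * one-liner (iemExampleShiftCountFromCl is a hypothetical helper):
 */
static uint8_t iemExampleShiftCountFromCl(uint8_t uCl, bool f64BitOperand)
{
    return uCl & (f64BitOperand ? 63 : 31); /* the count is masked, not saturated */
}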
7179
7180
7181/** Opcode 0x0f 0xa4. */
7182FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
7183{
7184 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
7185 IEMOP_HLP_MIN_386();
7186 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7187}
7188
7189
7190/** Opcode 0x0f 0xa5. */
7191FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
7192{
7193 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
7194 IEMOP_HLP_MIN_386();
7195 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7196}
7197
7198
7199/** Opcode 0x0f 0xa8. */
7200FNIEMOP_DEF(iemOp_push_gs)
7201{
7202 IEMOP_MNEMONIC(push_gs, "push gs");
7203 IEMOP_HLP_MIN_386();
7204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7205 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
7206}
7207
7208
7209/** Opcode 0x0f 0xa9. */
7210FNIEMOP_DEF(iemOp_pop_gs)
7211{
7212 IEMOP_MNEMONIC(pop_gs, "pop gs");
7213 IEMOP_HLP_MIN_386();
7214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7215 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
7216}
7217
7218
7219/** Opcode 0x0f 0xaa. */
7220FNIEMOP_DEF(iemOp_rsm)
7221{
7222 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
7223 IEMOP_HLP_MIN_386(); /* 386SL and later. */
7224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7225 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
7226}
7227
7228
7229
7230/** Opcode 0x0f 0xab. */
7231FNIEMOP_DEF(iemOp_bts_Ev_Gv)
7232{
7233 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
7234 IEMOP_HLP_MIN_386();
7235 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
7236}
7237
7238
7239/** Opcode 0x0f 0xac. */
7240FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
7241{
7242 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
7243 IEMOP_HLP_MIN_386();
7244 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7245}
7246
7247
7248/** Opcode 0x0f 0xad. */
7249FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
7250{
7251 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
7252 IEMOP_HLP_MIN_386();
7253 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7254}
7255
7256
7257/** Opcode 0x0f 0xae mem/0. */
7258FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
7259{
7260 IEMOP_MNEMONIC(fxsave, "fxsave m512");
7261 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7262 return IEMOP_RAISE_INVALID_OPCODE();
7263
7264 IEM_MC_BEGIN(3, 1);
7265 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7266 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7267 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7270 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7271 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7272 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
7273 IEM_MC_END();
7274 return VINF_SUCCESS;
7275}
7276
7277
7278/** Opcode 0x0f 0xae mem/1. */
7279FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
7280{
7281 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
7282 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7283 return IEMOP_RAISE_INVALID_OPCODE();
7284
7285 IEM_MC_BEGIN(3, 1);
7286 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7287 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7288 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7291 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7292 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7293 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7294 IEM_MC_END();
7295 return VINF_SUCCESS;
7296}
7297
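/*
 * Both FXSAVE and FXRSTOR above hand the 512-byte m512byte area straight to
 * the C implementation. Architecturally the area must be 16-byte aligned or
 * the instruction raises #GP(0); a sketch of that pre-check
 * (iemExampleFxAreaAligned is a hypothetical helper, the real check is done
 * by iemCImpl_fxsave/iemCImpl_fxrstor):
 */
static bool iemExampleFxAreaAligned(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* misaligned m512byte -> #GP(0) */
}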
7298
7299/**
7300 * @opmaps grp15
7301 * @opcode !11/2
7302 * @oppfx none
7303 * @opcpuid sse
7304 * @opgroup og_sse_mxcsrsm
7305 * @opxcpttype 5
7306 * @optest op1=0 -> mxcsr=0
7307 * @optest op1=0x2083 -> mxcsr=0x2083
7308 * @optest op1=0xfffffffe -> value.xcpt=0xd
7309 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
7310 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
7311 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
7312 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
7313 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
7314 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7315 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7316 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7317 */
7318FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
7319{
7320 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7321 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7322 return IEMOP_RAISE_INVALID_OPCODE();
7323
7324 IEM_MC_BEGIN(2, 0);
7325 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7326 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7330 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7331 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
7332 IEM_MC_END();
7333 return VINF_SUCCESS;
7334}
7335
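/*
 * The "@optest op1=0xfffffffe -> value.xcpt=0xd" line above records that
 * LDMXCSR raises #GP(0) when reserved MXCSR bits are set. The test is a
 * simple mask check against the CPU's MXCSR_MASK as reported by FXSAVE,
 * where 0xffbf is the architectural default when the saved field reads
 * zero (iemExampleMxcsrLoadOk is a hypothetical helper):
 */
static bool iemExampleMxcsrLoadOk(uint32_t uNewMxcsr, uint32_t fMxcsrMask)
{
    return (uNewMxcsr & ~fMxcsrMask) == 0; /* any reserved bit set -> #GP(0) */
}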
7336
7337/**
7338 * @opmaps grp15
7339 * @opcode !11/3
7340 * @oppfx none
7341 * @opcpuid sse
7342 * @opgroup og_sse_mxcsrsm
7343 * @opxcpttype 5
7344 * @optest mxcsr=0 -> op1=0
7345 * @optest mxcsr=0x2083 -> op1=0x2083
7346 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
7347 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
7348 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
7349 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
7350 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
7351 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7352 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7353 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7354 */
7355FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
7356{
7357 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7358 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7359 return IEMOP_RAISE_INVALID_OPCODE();
7360
7361 IEM_MC_BEGIN(2, 0);
7362 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7363 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7366 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7367 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7368 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
7369 IEM_MC_END();
7370 return VINF_SUCCESS;
7371}
7372
7373
7374/**
7375 * @opmaps grp15
7376 * @opcode !11/4
7377 * @oppfx none
7378 * @opcpuid xsave
7379 * @opgroup og_system
7380 * @opxcpttype none
7381 */
7382FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
7383{
7384 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
7385 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7386 return IEMOP_RAISE_INVALID_OPCODE();
7387
7388 IEM_MC_BEGIN(3, 0);
7389 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7390 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7391 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7394 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7395 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7396 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
7397 IEM_MC_END();
7398 return VINF_SUCCESS;
7399}
7400
7401
7402/**
7403 * @opmaps grp15
7404 * @opcode !11/5
7405 * @oppfx none
7406 * @opcpuid xsave
7407 * @opgroup og_system
7408 * @opxcpttype none
7409 */
7410FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
7411{
7412 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
7413 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7414 return IEMOP_RAISE_INVALID_OPCODE();
7415
7416 IEM_MC_BEGIN(3, 0);
7417 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7418 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7419 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7422 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7423 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7424 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7425 IEM_MC_END();
7426 return VINF_SUCCESS;
7427}
7428
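/*
 * XSAVE/XRSTOR above pass enmEffOpSize through to the C implementation
 * because REX.W selects the XSAVE64/XRSTOR64 forms, which use the 64-bit
 * image of the FPU instruction and data pointers in the legacy region
 * (iemExampleIsXsave64 is a hypothetical helper sketching the distinction):
 */
static bool iemExampleIsXsave64(IEMMODE enmEffOpSize)
{
    return enmEffOpSize == IEMMODE_64BIT; /* REX.W set -> 64-bit form */
}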
7429/** Opcode 0x0f 0xae mem/6. */
7430FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
7431
7432/**
7433 * @opmaps grp15
7434 * @opcode !11/7
7435 * @oppfx none
7436 * @opcpuid clfsh
7437 * @opgroup og_cachectl
7438 * @optest op1=1 ->
7439 */
7440FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
7441{
7442 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7443 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
7444 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7445
7446 IEM_MC_BEGIN(2, 0);
7447 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7448 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7451 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7452 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7453 IEM_MC_END();
7454 return VINF_SUCCESS;
7455}
7456
7457/**
7458 * @opmaps grp15
7459 * @opcode !11/7
7460 * @oppfx 0x66
7461 * @opcpuid clflushopt
7462 * @opgroup og_cachectl
7463 * @optest op1=1 ->
7464 */
7465FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
7466{
7467 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7468 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
7469 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7470
7471 IEM_MC_BEGIN(2, 0);
7472 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7473 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7476 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7477 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7478 IEM_MC_END();
7479 return VINF_SUCCESS;
7480}
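
/* Note: CLFLUSH and CLFLUSHOPT share a single C implementation here; the two
   differ only in memory-ordering strength, which presumably does not matter
   to an interpreted implementation. */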


/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
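
/* Note: On x86/AMD64 hosts the native fence instruction is only used when the
   host itself has SSE2, with iemAImpl_alt_mem_fence as the fallback; on ARM64
   the check is compiled out and the dedicated implementation is always used. */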


/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
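
/* Note: The WRxSBASE forms only need the canonical-address check for 64-bit
   operands; a 32-bit source is zero-extended into the 64-bit segment base by
   IEM_MC_STORE_SREG_BASE_U64 and thus is always canonical. */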


/**
 * Group 15 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                          066h,                 0f3h,                  0f2h */
    /* /0 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,  iemOp_Grp15_rdfsbase,  iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,  iemOp_Grp15_rdgsbase,  iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,  iemOp_Grp15_wrfsbase,  iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,  iemOp_Grp15_wrgsbase,  iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,            iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,            iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,            iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);


/**
 * Group 15 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                          066h,                    0f3h,                 0f2h */
    /* /0 */ iemOp_Grp15_fxsave,            iemOp_InvalidWithRM,     iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,           iemOp_InvalidWithRM,     iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,           iemOp_InvalidWithRM,     iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,           iemOp_InvalidWithRM,     iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,             iemOp_InvalidWithRM,     iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,            iemOp_InvalidWithRM,     iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt,          iemOp_InvalidWithRM,     iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,           iemOp_Grp15_clflushopt,  iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);


/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                             + pVCpu->iem.s.idxPrefix], bRm);
}
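
/* Example: mfence encodes as 0f ae f0 (mod=11b, reg=6, no prefix), so the
   dispatch above resolves to g_apfnGroup15RegReg[6 * 4 + 0]. */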


/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
}


/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
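
/* CMPXCHG semantics recap: if the accumulator equals the destination, ZF is
   set and the source is written to the destination; otherwise ZF is cleared
   and the destination value is loaded into the accumulator. The destination
   is written back in both cases, which is why the memory variant above maps
   it read/write unconditionally. */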

/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
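
/* Note: On 32-bit (RT_ARCH_X86) hosts the 64-bit source operand is handed to
   the assembly worker by reference rather than by value, presumably to avoid
   pushing a 64-bit argument through the 32-bit calling convention. */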


FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
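
/* A far pointer (Mp) is laid out as the offset followed by the 16-bit
   selector, hence the selector fetch at displacement 2, 4 or 8 above
   depending on the effective operand size. */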


/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);


/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    {   NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    {   NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
}
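
/* POPCNT counts the set bits in the source, e.g. popcnt eax, ebx with
   EBX=0xf0f0 yields EAX=8; ZF is set only for a zero source and the other
   arithmetic flags are cleared. */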


/**
 * @opcode      0xb9
 * @opinvalid   intel-modrm
 * @optest      ->
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the
     * modr/m byte too.  See bs3-cpu-decoder-1.c32.  So, we can forward to
     * iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}


/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
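
/* For the immediate forms the bit offset is truncated to the operand width
   (the & 0x0f/0x1f/0x3f masking above), so e.g. 'bt word [mem], 17' tests
   bit 1; only the Gv forms can address bits outside the addressed unit. */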


/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}


/**
 * Common worker for BSF and BSR instructions.
 *
 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
 * the destination register, which means that for 32-bit operations the high
 * bits must be left alone.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
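
/* When the scanned source is zero, BSF/BSR set ZF and leave the destination
   unchanged (documented behaviour on AMD, de facto on Intel); that is why the
   32-bit paths above only clear the high half of the destination when ZF
   ends up clear. */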


/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}


/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    {   NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    {   NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    {   NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt,       &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
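
/* TZCNT is encoded as F3 0F BC; CPUs without BMI1 simply ignore the F3 prefix
   and execute BSF, which the guest-feature fallback above reproduces.  LZCNT
   (F3 0F BD) below falls back to BSR the same way. */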


/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}


/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    {   NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    {   NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    {   NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt,       &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}



/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
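
/* XADD exchanges and adds: the original destination value ends up in the
   source register while the destination becomes the sum, i.e. temp = dst;
   dst += src; src = temp.  The flags are those of the addition. */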


/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);


/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

9137/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
9138FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
9139/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
9140FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
9141/* Opcode 0xf3 0x0f 0xc4 - invalid */
9142/* Opcode 0xf2 0x0f 0xc4 - invalid */
9143
9144/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
9145FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
9146/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
9147FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
9148/* Opcode 0xf3 0x0f 0xc5 - invalid */
9149/* Opcode 0xf2 0x0f 0xc5 - invalid */
9150
9151/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
9152FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
9153/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
9154FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
9155/* Opcode 0xf3 0x0f 0xc6 - invalid */
9156/* Opcode 0xf2 0x0f 0xc6 - invalid */
9157
9158
9159/** Opcode 0x0f 0xc7 !11/1. */
9160FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
9161{
9162 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
9163
9164 IEM_MC_BEGIN(4, 3);
9165 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
9166 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
9167 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
9168 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9169 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
9170 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
9171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9172
9173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9174 IEMOP_HLP_DONE_DECODING();
9175 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9176
9177 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
9178 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
9179 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
9180
9181 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
9182 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
9183 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
9184
9185 IEM_MC_FETCH_EFLAGS(EFlags);
9186 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9187 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9188 else
9189 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9190
9191 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
9192 IEM_MC_COMMIT_EFLAGS(EFlags);
9193 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9194 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
9195 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
9196 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
9197 IEM_MC_ENDIF();
9198 IEM_MC_ADVANCE_RIP();
9199
9200 IEM_MC_END();
9201 return VINF_SUCCESS;
9202}
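/* A rough C model of the cmpxchg8b operation implemented above (sketch
   only, not the actual iemAImpl worker):

       static bool iemExampleCmpXchg8b(uint64_t *pu64Dst, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
       {
           if (*pu64Dst == *pu64EaxEdx)
           {
               *pu64Dst = u64EbxEcx;
               return true;
           }
           *pu64EaxEdx = *pu64Dst;
           return false;
       }

   A true return corresponds to ZF=1 (the destination got ECX:EBX), false
   to ZF=0, in which case EDX:EAX receive the old memory value - which is
   what the IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) block above reflects. */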
9203
9204
9205/** Opcode REX.W 0x0f 0xc7 !11/1. */
9206FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
9207{
9208 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
9209 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9210 {
9211#if 0
9212 RT_NOREF(bRm);
9213 IEMOP_BITCH_ABOUT_STUB();
9214 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9215#else
9216 IEM_MC_BEGIN(4, 3);
9217 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
9218 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
9219 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
9220 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9221 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
9222 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
9223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9224
9225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9226 IEMOP_HLP_DONE_DECODING();
9227 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
9228 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9229
9230 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
9231 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
9232 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
9233
9234 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
9235 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
9236 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
9237
9238 IEM_MC_FETCH_EFLAGS(EFlags);
9239# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
9240# if defined(RT_ARCH_AMD64)
9241 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9242# endif
9243 {
9244 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9245 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9246 else
9247 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9248 }
9249# if defined(RT_ARCH_AMD64)
9250 else
9251# endif
9252# endif
9253# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
9254 {
9255 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
9256 accesses that are not all atomic, which works fine in a UNI CPU guest
9257 configuration (ignoring DMA). If guest SMP is active we have no choice
9258 but to use a rendezvous callback here. Sigh. */
9259 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
9260 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9261 else
9262 {
9263 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9264 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
9265 }
9266 }
9267# endif
9268
9269 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
9270 IEM_MC_COMMIT_EFLAGS(EFlags);
9271 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9272 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
9273 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
9274 IEM_MC_ENDIF();
9275 IEM_MC_ADVANCE_RIP();
9276
9277 IEM_MC_END();
9278 return VINF_SUCCESS;
9279#endif
9280 }
9281 Log(("cmpxchg16b -> #UD\n"));
9282 return IEMOP_RAISE_INVALID_OPCODE();
9283}
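/* Note on the alignment check in the cmpxchg16b path above: unlike
   cmpxchg8b, cmpxchg16b raises #GP(0) for a misaligned memory operand,
   hence the explicit IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16)
   before the destination is mapped. */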
9284
9285FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
9286{
9287 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
9288 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
9289 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
9290}
9291
9292/** Opcode 0x0f 0xc7 11/6. */
9293FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
9294
9295/** Opcode 0x0f 0xc7 !11/6. */
9296#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9297FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
9298{
9299 IEMOP_MNEMONIC(vmptrld, "vmptrld");
9300 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
9301 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
9302 IEM_MC_BEGIN(2, 0);
9303 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9304 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9306 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9307 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9308 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
9309 IEM_MC_END();
9310 return VINF_SUCCESS;
9311}
9312#else
9313FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
9314#endif
9315
9316/** Opcode 0x66 0x0f 0xc7 !11/6. */
9317#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9318FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
9319{
9320 IEMOP_MNEMONIC(vmclear, "vmclear");
9321 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
9322 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
9323 IEM_MC_BEGIN(2, 0);
9324 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9325 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9327 IEMOP_HLP_DONE_DECODING();
9328 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9329 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
9330 IEM_MC_END();
9331 return VINF_SUCCESS;
9332}
9333#else
9334FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
9335#endif
9336
9337/** Opcode 0xf3 0x0f 0xc7 !11/6. */
9338#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9339FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
9340{
9341 IEMOP_MNEMONIC(vmxon, "vmxon");
9342 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
9343 IEM_MC_BEGIN(2, 0);
9344 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9345 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9347 IEMOP_HLP_DONE_DECODING();
9348 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9349 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
9350 IEM_MC_END();
9351 return VINF_SUCCESS;
9352}
9353#else
9354FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
9355#endif
9356
9357/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
9358#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9359FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
9360{
9361 IEMOP_MNEMONIC(vmptrst, "vmptrst");
9362 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
9363 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
9364 IEM_MC_BEGIN(2, 0);
9365 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9366 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9368 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9369 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9370 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
9371 IEM_MC_END();
9372 return VINF_SUCCESS;
9373}
9374#else
9375FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
9376#endif
9377
9378/** Opcode 0x0f 0xc7 11/7. */
9379FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
9380
9381
9382/**
9383 * Group 9 jump table for register variant.
9384 */
9385IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
9386{ /* pfx: none, 066h, 0f3h, 0f2h */
9387 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9388 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
9389 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9390 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9391 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9392 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9393 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9394 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9395};
9396AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
9397
9398
9399/**
9400 * Group 9 jump table for memory variant.
9401 */
9402IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
9403{ /* pfx: none, 066h, 0f3h, 0f2h */
9404 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9405 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
9406 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9407 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9408 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9409 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9410 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
9411 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9412};
9413AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
9414
9415
9416/** Opcode 0x0f 0xc7. */
9417FNIEMOP_DEF(iemOp_Grp9)
9418{
9419 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
9420 if (IEM_IS_MODRM_REG_MODE(bRm))
9421 /* register, register */
9422 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9423 + pVCpu->iem.s.idxPrefix], bRm);
9424 /* memory, register */
9425 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9426 + pVCpu->iem.s.idxPrefix], bRm);
9427}
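/* Dispatch illustration: both 8x4 tables above are indexed as
   modrm.reg * 4 + prefix index, where the prefix index is 0 for no
   prefix, 1 for 066h, 2 for 0f3h and 3 for 0f2h. E.g. f3 0f c7 /6 in
   memory form selects g_apfnGroup9MemReg[6 * 4 + 2], i.e.
   iemOp_Grp9_vmxon_Mq. */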
9428
9429
9430/**
9431 * Common 'bswap register' helper.
9432 */
9433FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
9434{
9435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9436 switch (pVCpu->iem.s.enmEffOpSize)
9437 {
9438 case IEMMODE_16BIT:
9439 IEM_MC_BEGIN(1, 0);
9440 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9441 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
9442 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
9443 IEM_MC_ADVANCE_RIP();
9444 IEM_MC_END();
9445 return VINF_SUCCESS;
9446
9447 case IEMMODE_32BIT:
9448 IEM_MC_BEGIN(1, 0);
9449 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9450 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9451 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9452 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
9453 IEM_MC_ADVANCE_RIP();
9454 IEM_MC_END();
9455 return VINF_SUCCESS;
9456
9457 case IEMMODE_64BIT:
9458 IEM_MC_BEGIN(1, 0);
9459 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9460 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9461 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
9462 IEM_MC_ADVANCE_RIP();
9463 IEM_MC_END();
9464 return VINF_SUCCESS;
9465
9466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9467 }
9468}
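/* Byte-order illustration for the helper above: the 32-bit form reverses
   all four bytes, e.g. 0x12345678 becomes 0x78563412, and the 64-bit form
   reverses all eight. The 16-bit form is documented as undefined on real
   CPUs; here it is routed to a separate iemAImpl_bswap_u16 worker that
   operates on a 32-bit register reference without clearing the high
   dword. */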
9469
9470
9471/** Opcode 0x0f 0xc8. */
9472FNIEMOP_DEF(iemOp_bswap_rAX_r8)
9473{
9474 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
9475 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
9476 prefix, but REX.B appears to be the correct prefix. For a parallel
9477 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
9478 IEMOP_HLP_MIN_486();
9479 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
9480}
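/* Encoding illustration for the note above: 0f c8 is bswap eax,
   41 0f c8 (REX.B) is bswap r8d, and 49 0f c8 (REX.W+REX.B) is bswap r8.
   Per the note, a REX.X-only prefix (42 0f c8) is not expected to change
   the register selection, the manual's wording notwithstanding. */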
9481
9482
9483/** Opcode 0x0f 0xc9. */
9484FNIEMOP_DEF(iemOp_bswap_rCX_r9)
9485{
9486 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
9487 IEMOP_HLP_MIN_486();
9488 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
9489}
9490
9491
9492/** Opcode 0x0f 0xca. */
9493FNIEMOP_DEF(iemOp_bswap_rDX_r10)
9494{
9495 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
9496 IEMOP_HLP_MIN_486();
9497 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
9498}
9499
9500
9501/** Opcode 0x0f 0xcb. */
9502FNIEMOP_DEF(iemOp_bswap_rBX_r11)
9503{
9504 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
9505 IEMOP_HLP_MIN_486();
9506 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
9507}
9508
9509
9510/** Opcode 0x0f 0xcc. */
9511FNIEMOP_DEF(iemOp_bswap_rSP_r12)
9512{
9513 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
9514 IEMOP_HLP_MIN_486();
9515 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
9516}
9517
9518
9519/** Opcode 0x0f 0xcd. */
9520FNIEMOP_DEF(iemOp_bswap_rBP_r13)
9521{
9522 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
9523 IEMOP_HLP_MIN_486();
9524 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
9525}
9526
9527
9528/** Opcode 0x0f 0xce. */
9529FNIEMOP_DEF(iemOp_bswap_rSI_r14)
9530{
9531 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
9532 IEMOP_HLP_MIN_486();
9533 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
9534}
9535
9536
9537/** Opcode 0x0f 0xcf. */
9538FNIEMOP_DEF(iemOp_bswap_rDI_r15)
9539{
9540 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
9541 IEMOP_HLP_MIN_486();
9542 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
9543}
9544
9545
9546/* Opcode 0x0f 0xd0 - invalid */
9547/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
9548FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
9549/* Opcode 0xf3 0x0f 0xd0 - invalid */
9550/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
9551FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
9552
9553/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
9554FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
9555{
9556 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
9557 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
9558}
9559
9560/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
9561FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
9562{
9563 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9564 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
9565}
9566
9567/* Opcode 0xf3 0x0f 0xd1 - invalid */
9568/* Opcode 0xf2 0x0f 0xd1 - invalid */
9569
9570/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
9571FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
9572{
9573 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
9574 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
9575}
9576
9577
9578/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
9579FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
9580{
9581 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9582 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
9583}
9584
9585
9586/* Opcode 0xf3 0x0f 0xd2 - invalid */
9587/* Opcode 0xf2 0x0f 0xd2 - invalid */
9588
9589/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
9590FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
9591{
9592 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
9593 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
9594}
9595
9596
9597/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
9598FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
9599{
9600 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9601 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
9602}
9603
9604
9605/* Opcode 0xf3 0x0f 0xd3 - invalid */
9606/* Opcode 0xf2 0x0f 0xd3 - invalid */
9607
9608
9609/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
9610FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
9611{
9612 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9613 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
9614}
9615
9616
9617/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
9618FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
9619{
9620 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9621 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
9622}
9623
9624
9625/* Opcode 0xf3 0x0f 0xd4 - invalid */
9626/* Opcode 0xf2 0x0f 0xd4 - invalid */
9627
9628/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
9629FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
9630{
9631 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9632 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
9633}
9634
9635/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
9636FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
9637{
9638 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9639 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
9640}
9641
9642
9643/* Opcode 0xf3 0x0f 0xd5 - invalid */
9644/* Opcode 0xf2 0x0f 0xd5 - invalid */
9645
9646/* Opcode 0x0f 0xd6 - invalid */
9647
9648/**
9649 * @opcode 0xd6
9650 * @oppfx 0x66
9651 * @opcpuid sse2
9652 * @opgroup og_sse2_pcksclr_datamove
9653 * @opxcpttype none
9654 * @optest op1=-1 op2=2 -> op1=2
9655 * @optest op1=0 op2=-42 -> op1=-42
9656 */
9657FNIEMOP_DEF(iemOp_movq_Wq_Vq)
9658{
9659 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
9660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9661 if (IEM_IS_MODRM_REG_MODE(bRm))
9662 {
9663 /*
9664 * Register, register.
9665 */
9666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9667 IEM_MC_BEGIN(0, 2);
9668 IEM_MC_LOCAL(uint64_t, uSrc);
9669
9670 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9671 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9672
9673 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
9674 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
9675
9676 IEM_MC_ADVANCE_RIP();
9677 IEM_MC_END();
9678 }
9679 else
9680 {
9681 /*
9682 * Memory, register.
9683 */
9684 IEM_MC_BEGIN(0, 2);
9685 IEM_MC_LOCAL(uint64_t, uSrc);
9686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9687
9688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9690 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9691 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9692
9693 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
9694 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9695
9696 IEM_MC_ADVANCE_RIP();
9697 IEM_MC_END();
9698 }
9699 return VINF_SUCCESS;
9700}
9701
9702
9703/**
9704 * @opcode 0xd6
9705 * @opcodesub 11 mr/reg
9706 * @oppfx f3
9707 * @opcpuid sse2
9708 * @opgroup og_sse2_simdint_datamove
9709 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9710 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9711 */
9712FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
9713{
9714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9715 if (IEM_IS_MODRM_REG_MODE(bRm))
9716 {
9717 /*
9718 * Register, register.
9719 */
9720 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9722 IEM_MC_BEGIN(0, 1);
9723 IEM_MC_LOCAL(uint64_t, uSrc);
9724
9725 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9726 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9727
9728 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
9729 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
9730 IEM_MC_FPU_TO_MMX_MODE();
9731
9732 IEM_MC_ADVANCE_RIP();
9733 IEM_MC_END();
9734 return VINF_SUCCESS;
9735 }
9736
9737 /**
9738 * @opdone
9739 * @opmnemonic udf30fd6mem
9740 * @opcode 0xd6
9741 * @opcodesub !11 mr/reg
9742 * @oppfx f3
9743 * @opunused intel-modrm
9744 * @opcpuid sse
9745 * @optest ->
9746 */
9747 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9748}
9749
9750
9751/**
9752 * @opcode 0xd6
9753 * @opcodesub 11 mr/reg
9754 * @oppfx f2
9755 * @opcpuid sse2
9756 * @opgroup og_sse2_simdint_datamove
9757 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9758 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9759 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
9760 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
9761 * @optest op1=-42 op2=0xfedcba9876543210
9762 * -> op1=0xfedcba9876543210 ftw=0xff
9763 */
9764FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
9765{
9766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9767 if (IEM_IS_MODRM_REG_MODE(bRm))
9768 {
9769 /*
9770 * Register, register.
9771 */
9772 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9774 IEM_MC_BEGIN(0, 1);
9775 IEM_MC_LOCAL(uint64_t, uSrc);
9776
9777 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9778 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9779
9780 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
9781 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
9782 IEM_MC_FPU_TO_MMX_MODE();
9783
9784 IEM_MC_ADVANCE_RIP();
9785 IEM_MC_END();
9786 return VINF_SUCCESS;
9787 }
9788
9789 /**
9790 * @opdone
9791 * @opmnemonic udf20fd6mem
9792 * @opcode 0xd6
9793 * @opcodesub !11 mr/reg
9794 * @oppfx f2
9795 * @opunused intel-modrm
9796 * @opcpuid sse
9797 * @optest ->
9798 */
9799 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9800}
9801
9802
9803/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9804FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9805{
9806 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9807 /* Docs say register only. */
9808 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
9809 {
9810 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
9811 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
9812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9813 IEM_MC_BEGIN(2, 0);
9814 IEM_MC_ARG(uint64_t *, puDst, 0);
9815 IEM_MC_ARG(uint64_t const *, puSrc, 1);
9816 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9817 IEM_MC_PREPARE_FPU_USAGE();
9818 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
9819 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
9820 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
9821 IEM_MC_FPU_TO_MMX_MODE();
9822 IEM_MC_ADVANCE_RIP();
9823 IEM_MC_END();
9824 return VINF_SUCCESS;
9825 }
9826 return IEMOP_RAISE_INVALID_OPCODE();
9827}
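/* Semantics illustration for pmovmskb (hypothetical helper, not the
   actual iemAImpl worker): the most significant bit of each source byte
   is gathered into the low bits of the destination GPR, the rest being
   zeroed:

       static uint32_t iemExamplePMovMskBU64(uint64_t uSrc)
       {
           uint32_t fMask = 0;
           for (unsigned iByte = 0; iByte < 8; iByte++)
               fMask |= ((uint32_t)(uSrc >> (iByte * 8 + 7)) & 1) << iByte;
           return fMask;
       }

   The 128-bit form below works the same way, producing a 16-bit mask. */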
9828
9829
9830/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
9831FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9832{
9833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9834 /* Docs say register only. */
9835 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
9836 {
9837 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
9838 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
9839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9840 IEM_MC_BEGIN(2, 0);
9841 IEM_MC_ARG(uint64_t *, puDst, 0);
9842 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
9843 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9844 IEM_MC_PREPARE_SSE_USAGE();
9845 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
9846 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
9847 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
9848 IEM_MC_ADVANCE_RIP();
9849 IEM_MC_END();
9850 return VINF_SUCCESS;
9851 }
9852 return IEMOP_RAISE_INVALID_OPCODE();
9853}
9854
9855
9856/* Opcode 0xf3 0x0f 0xd7 - invalid */
9857/* Opcode 0xf2 0x0f 0xd7 - invalid */
9858
9859
9860/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9861FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
9862{
9863 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9864 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
9865}
9866
9867
9868/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
9869FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
9870{
9871 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9872 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
9873}
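/* Saturation illustration for the psubusb/psubusw pair above: unsigned
   saturating subtraction clamps at zero instead of wrapping, e.g. for
   psubusb 0x10 - 0x20 yields 0x00 rather than 0xf0. */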
9874
9875
9876/* Opcode 0xf3 0x0f 0xd8 - invalid */
9877/* Opcode 0xf2 0x0f 0xd8 - invalid */
9878
9879/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9880FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
9881{
9882 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9883 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
9884}
9885
9886
9887/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9888FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
9889{
9890 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9891 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
9892}
9893
9894
9895/* Opcode 0xf3 0x0f 0xd9 - invalid */
9896/* Opcode 0xf2 0x0f 0xd9 - invalid */
9897
9898/** Opcode 0x0f 0xda - pminub Pq, Qq */
9899FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
9900{
9901 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
9902 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pminub_u64);
9903}
9904
9905
9906/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9907FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
9908{
9909 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
9910 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
9911}
9912
9913/* Opcode 0xf3 0x0f 0xda - invalid */
9914/* Opcode 0xf2 0x0f 0xda - invalid */
9915
9916/** Opcode 0x0f 0xdb - pand Pq, Qq */
9917FNIEMOP_DEF(iemOp_pand_Pq_Qq)
9918{
9919 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9920 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
9921}
9922
9923
9924/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
9925FNIEMOP_DEF(iemOp_pand_Vx_Wx)
9926{
9927 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9928 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
9929}
9930
9931
9932/* Opcode 0xf3 0x0f 0xdb - invalid */
9933/* Opcode 0xf2 0x0f 0xdb - invalid */
9934
9935/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9936FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
9937{
9938 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9939 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
9940}
9941
9942
9943/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9944FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
9945{
9946 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9947 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
9948}
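/* Saturation illustration for the paddusb/paddusw pair above: unsigned
   saturating addition clamps at the element maximum instead of wrapping,
   e.g. for paddusb 0xf0 + 0x20 yields 0xff rather than 0x10. */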
9949
9950
9951/* Opcode 0xf3 0x0f 0xdc - invalid */
9952/* Opcode 0xf2 0x0f 0xdc - invalid */
9953
9954/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9955FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
9956{
9957 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9958 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
9959}
9960
9961
9962/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9963FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
9964{
9965 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9966 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
9967}
9968
9969
9970/* Opcode 0xf3 0x0f 0xdd - invalid */
9971/* Opcode 0xf2 0x0f 0xdd - invalid */
9972
9973/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9974FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
9975{
9976 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9977 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaxub_u64);
9978}
9979
9980
9981/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
9982FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
9983{
9984 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9985 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
9986}
9987
9988/* Opcode 0xf3 0x0f 0xde - invalid */
9989/* Opcode 0xf2 0x0f 0xde - invalid */
9990
9991
9992/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9993FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
9994{
9995 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9996 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
9997}
9998
9999
10000/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
10001FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
10002{
10003 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10004 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
10005}
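/* Operand-order note for pandn: it is the destination operand that gets
   inverted, i.e. dst = ~dst & src rather than dst & ~src. E.g. with
   dst=0x0f and src=0xff the result is 0xf0. */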
10006
10007
10008/* Opcode 0xf3 0x0f 0xdf - invalid */
10009/* Opcode 0xf2 0x0f 0xdf - invalid */
10010
10011/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
10012FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
10013/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
10014FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
10015/* Opcode 0xf3 0x0f 0xe0 - invalid */
10016/* Opcode 0xf2 0x0f 0xe0 - invalid */
10017
10018/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
10019FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
10020{
10021 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10022 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
10023}
10024
10025
10026/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
10027FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
10028{
10029 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10030 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
10031}
10032
10033/* Opcode 0xf3 0x0f 0xe1 - invalid */
10034/* Opcode 0xf2 0x0f 0xe1 - invalid */
10035
10036/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
10037FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
10038{
10039 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10040 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
10041}
10042
10043
10044/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
10045FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
10046{
10047 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10048 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
10049}
10050
10051/* Opcode 0xf3 0x0f 0xe2 - invalid */
10052/* Opcode 0xf2 0x0f 0xe2 - invalid */
10053
10054/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
10055FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
10056/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
10057FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
10058/* Opcode 0xf3 0x0f 0xe3 - invalid */
10059/* Opcode 0xf2 0x0f 0xe3 - invalid */
10060
10061/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
10062FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
10063/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
10064FNIEMOP_STUB(iemOp_pmulhuw_Vx_Wx);
10065/* Opcode 0xf3 0x0f 0xe4 - invalid */
10066/* Opcode 0xf2 0x0f 0xe4 - invalid */
10067
10068/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
10069FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
10070{
10071 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10072 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
10073}
10074
10075
10076/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
10077FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
10078{
10079 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10080 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
10081}
10082
10083
10084/* Opcode 0xf3 0x0f 0xe5 - invalid */
10085/* Opcode 0xf2 0x0f 0xe5 - invalid */
10086
10087/* Opcode 0x0f 0xe6 - invalid */
10088/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
10089FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
10090/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
10091FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
10092/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
10093FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
10094
10095
10096/**
10097 * @opcode 0xe7
10098 * @opcodesub !11 mr/reg
10099 * @oppfx none
10100 * @opcpuid sse
10101 * @opgroup og_sse1_cachect
10102 * @opxcpttype none
10103 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
10104 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10105 */
10106FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
10107{
10108 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10109 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10110 if (IEM_IS_MODRM_MEM_MODE(bRm))
10111 {
10112 /* Register, memory. */
10113 IEM_MC_BEGIN(0, 2);
10114 IEM_MC_LOCAL(uint64_t, uSrc);
10115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10116
10117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10119 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
10120 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10121
10122 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
10123 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10124 IEM_MC_FPU_TO_MMX_MODE();
10125
10126 IEM_MC_ADVANCE_RIP();
10127 IEM_MC_END();
10128 return VINF_SUCCESS;
10129 }
10130 /**
10131 * @opdone
10132 * @opmnemonic ud0fe7reg
10133 * @opcode 0xe7
10134 * @opcodesub 11 mr/reg
10135 * @oppfx none
10136 * @opunused immediate
10137 * @opcpuid sse
10138 * @optest ->
10139 */
10140 return IEMOP_RAISE_INVALID_OPCODE();
10141}
10142
10143/**
10144 * @opcode 0xe7
10145 * @opcodesub !11 mr/reg
10146 * @oppfx 0x66
10147 * @opcpuid sse2
10148 * @opgroup og_sse2_cachect
10149 * @opxcpttype 1
10150 * @optest op1=-1 op2=2 -> op1=2
10151 * @optest op1=0 op2=-42 -> op1=-42
10152 */
10153FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
10154{
10155 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10157 if (IEM_IS_MODRM_MEM_MODE(bRm))
10158 {
10159 /* Register, memory. */
10160 IEM_MC_BEGIN(0, 2);
10161 IEM_MC_LOCAL(RTUINT128U, uSrc);
10162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10163
10164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10166 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10167 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10168
10169 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10170 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10171
10172 IEM_MC_ADVANCE_RIP();
10173 IEM_MC_END();
10174 return VINF_SUCCESS;
10175 }
10176
10177 /**
10178 * @opdone
10179 * @opmnemonic ud660fe7reg
10180 * @opcode 0xe7
10181 * @opcodesub 11 mr/reg
10182 * @oppfx 0x66
10183 * @opunused immediate
10184 * @opcpuid sse
10185 * @optest ->
10186 */
10187 return IEMOP_RAISE_INVALID_OPCODE();
10188}
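/* Note on the movntq/movntdq forms above: these are non-temporal store
   hints indicating the data need not be cached. Under emulation they are
   implemented as ordinary stores, which is architecturally fine since the
   non-temporal property is only a performance hint. */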
10189
10190/* Opcode 0xf3 0x0f 0xe7 - invalid */
10191/* Opcode 0xf2 0x0f 0xe7 - invalid */
10192
10193
10194/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
10195FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
10196{
10197 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10198 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
10199}
10200
10201
10202/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
10203FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
10204{
10205 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10206 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
10207}
10208
10209
10210/* Opcode 0xf3 0x0f 0xe8 - invalid */
10211/* Opcode 0xf2 0x0f 0xe8 - invalid */
10212
10213/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
10214FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
10215{
10216 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10217 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
10218}
10219
10220
10221/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
10222FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
10223{
10224 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10225 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
10226}
10227
10228
10229/* Opcode 0xf3 0x0f 0xe9 - invalid */
10230/* Opcode 0xf2 0x0f 0xe9 - invalid */
10231
10232/** Opcode 0x0f 0xea - pminsw Pq, Qq */
10233FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
10234/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
10235FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
10236/* Opcode 0xf3 0x0f 0xea - invalid */
10237/* Opcode 0xf2 0x0f 0xea - invalid */
10238
10239
10240/** Opcode 0x0f 0xeb - por Pq, Qq */
10241FNIEMOP_DEF(iemOp_por_Pq_Qq)
10242{
10243 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10244 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
10245}
10246
10247
10248/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
10249FNIEMOP_DEF(iemOp_por_Vx_Wx)
10250{
10251 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10252 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
10253}
10254
10255
10256/* Opcode 0xf3 0x0f 0xeb - invalid */
10257/* Opcode 0xf2 0x0f 0xeb - invalid */
10258
10259/** Opcode 0x0f 0xec - paddsb Pq, Qq */
10260FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
10261{
10262 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10263 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
10264}
10265
10266
10267/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
10268FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
10269{
10270 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10271 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
10272}
10273
10274
10275/* Opcode 0xf3 0x0f 0xec - invalid */
10276/* Opcode 0xf2 0x0f 0xec - invalid */
10277
10278/** Opcode 0x0f 0xed - paddsw Pq, Qq */
10279FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
10280{
10281 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10282 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
10283}
10284
10285
10286/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
10287FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
10288{
10289 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10290 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
10291}
10292
10293
10294/* Opcode 0xf3 0x0f 0xed - invalid */
10295/* Opcode 0xf2 0x0f 0xed - invalid */
10296
10297/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
10298FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
10299/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
10300FNIEMOP_STUB(iemOp_pmaxsw_Vx_Wx);
10301/* Opcode 0xf3 0x0f 0xee - invalid */
10302/* Opcode 0xf2 0x0f 0xee - invalid */
10303
10304
10305/** Opcode 0x0f 0xef - pxor Pq, Qq */
10306FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
10307{
10308 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10309 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
10310}
10311
10312
10313/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
10314FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
10315{
10316 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10317 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
10318}
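/* Usage note: pxor with identical source and destination (e.g.
   pxor xmm0, xmm0) is the canonical register-zeroing idiom, so this
   encoding shows up very frequently in guest code. */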
10319
10320
10321/* Opcode 0xf3 0x0f 0xef - invalid */
10322/* Opcode 0xf2 0x0f 0xef - invalid */
10323
10324/* Opcode 0x0f 0xf0 - invalid */
10325/* Opcode 0x66 0x0f 0xf0 - invalid */
10326/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
10327FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
10328
10329
10330/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
10331FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
10332{
10333 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10334 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
10335}
10336
10337
10338/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
10339FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
10340{
10341 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10342 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
10343}
10344
10345
10346/* Opcode 0xf2 0x0f 0xf1 - invalid */
10347
10348/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
10349FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
10350{
10351 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10352 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
10353}
10354
10355
10356/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
10357FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
10358{
10359 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10360 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
10361}
10362
10363
10364/* Opcode 0xf2 0x0f 0xf2 - invalid */
10365
10366/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
10367FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
10368{
10369 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10370 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
10371}
10372
10373
10374/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
10375FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
10376{
10377 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10378 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
10379}
10380
10381/* Opcode 0xf2 0x0f 0xf3 - invalid */
10382
10383/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
10384FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
10385/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
10386FNIEMOP_STUB(iemOp_pmuludq_Vx_Wx);
10387/* Opcode 0xf2 0x0f 0xf4 - invalid */
10388
10389/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
10390FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
10391{
10392 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10393 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
10394}
10395
10396
10397/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
10398FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
10399{
10400 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10401 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
10402}
10403
10404/* Opcode 0xf2 0x0f 0xf5 - invalid */
10405
10406/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
10407FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
10408/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
10409FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
10410/* Opcode 0xf2 0x0f 0xf6 - invalid */
10411
10412/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
10413FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
10414/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
10415FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
10416/* Opcode 0xf2 0x0f 0xf7 - invalid */
10417
10418
10419/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
10420FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
10421{
10422 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10423 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
10424}
10425
10426
10427/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
10428FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
10429{
10430 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10431 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
10432}
10433
10434
10435/* Opcode 0xf2 0x0f 0xf8 - invalid */
10436
10437
10438/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
10439FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
10440{
10441 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10442 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
10443}
10444
10445
10446/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
10447FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
10448{
10449 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10450 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
10451}
10452
10453
10454/* Opcode 0xf2 0x0f 0xf9 - invalid */
10455
10456
10457/** Opcode 0x0f 0xfa - psubd Pq, Qq */
10458FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
10459{
10460 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10461 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
10462}
10463
10464
10465/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
10466FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
10467{
10468 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10469 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
10470}
10471
10472
10473/* Opcode 0xf2 0x0f 0xfa - invalid */
10474
10475
10476/** Opcode 0x0f 0xfb - psubq Pq, Qq */
10477FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
10478{
10479 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10480 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
10481}
10482
10483
10484/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
10485FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
10486{
10487 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10488 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
10489}
10490
10491
10492/* Opcode 0xf2 0x0f 0xfb - invalid */
10493
10494
10495/** Opcode 0x0f 0xfc - paddb Pq, Qq */
10496FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
10497{
10498 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10499 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
10500}
10501
10502
10503/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
10504FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
10505{
10506 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10507 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
10508}
10509
10510
10511/* Opcode 0xf2 0x0f 0xfc - invalid */
10512
10513
10514/** Opcode 0x0f 0xfd - paddw Pq, Qq */
10515FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
10516{
10517 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10518 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
10519}
10520
10521
10522/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
10523FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
10524{
10525 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10526 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
10527}
10528
10529
10530/* Opcode 0xf2 0x0f 0xfd - invalid */
10531
10532
10533/** Opcode 0x0f 0xfe - paddd Pq, Qq */
10534FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
10535{
10536 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10537 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
10538}
10539
10540
10541/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
10542FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
10543{
10544 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10545 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
10546}
10547
10548
10549/* Opcode 0xf2 0x0f 0xfe - invalid */
10550
10551
10552/** Opcode **** 0x0f 0xff - UD0 */
10553FNIEMOP_DEF(iemOp_ud0)
10554{
10555 IEMOP_MNEMONIC(ud0, "ud0");
10556 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
10557 {
10558 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
10559#ifndef TST_IEM_CHECK_MC
10560 if (IEM_IS_MODRM_MEM_MODE(bRm))
10561 {
10562 RTGCPTR GCPtrEff;
10563 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
10564 if (rcStrict != VINF_SUCCESS)
10565 return rcStrict;
10566 }
10567#endif
10568 IEMOP_HLP_DONE_DECODING();
10569 }
10570 return IEMOP_RAISE_INVALID_OPCODE();
10571}
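/* Decoding note for ud0: Intel documents UD0 as taking a ModR/M byte, so
   the Intel path above fetches one and runs the effective address
   calculation (faulting on a bad address) before raising #UD, while other
   vendors get #UD without any further opcode bytes being consumed. */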
10572
10573
10574
10575/**
10576 * Two byte opcode map, first byte 0x0f.
10577 *
10578 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
10579 * check if it needs updating as well when making changes.
10580 */
10581IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
10582{
10583 /* no prefix, 066h prefix f3h prefix, f2h prefix */
10584 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
10585 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
10586 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
10587 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
10588 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
10589 /* 0x05 */ IEMOP_X4(iemOp_syscall),
10590 /* 0x06 */ IEMOP_X4(iemOp_clts),
10591 /* 0x07 */ IEMOP_X4(iemOp_sysret),
10592 /* 0x08 */ IEMOP_X4(iemOp_invd),
10593 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
10594 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
10595 /* 0x0b */ IEMOP_X4(iemOp_ud2),
10596 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
10597 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
10598 /* 0x0e */ IEMOP_X4(iemOp_femms),
10599 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
10600
10601 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
10602 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
10603 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
10604 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10605 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10606 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10607 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
10608 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10609 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
10610 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
10611 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
10612 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
10613 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
10614 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
10615 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
10616 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
10617
10618 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
10619 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
10620 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
10621 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
10622 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
10623 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
10624 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
10625 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
10626 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10627 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10628 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
10629 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10630 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
10631 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
10632 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10633 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10634
10635 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
10636 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
10637 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
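
/*
 * Note on dispatch: the map stores four entries per opcode, one for each
 * mandatory-prefix variant (none, 0x66, 0xF3, 0xF2), which is what the
 * 1024 (= 256 * 4) element count asserted above reflects.  The fragment
 * below is a minimal sketch of how such a map could be indexed; it assumes
 * pVCpu->iem.s.idxPrefix holds the 0..3 prefix index tracked by the
 * decoder, and the function name is illustrative rather than the actual
 * two-byte escape handler.
 */
#if 0 /* illustrative sketch only */
FNIEMOP_DEF(iemOp_2byteEscape_Sketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b); /* the second opcode byte */
    /* Index: opcode * 4 + prefix variant (0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2). */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif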

/** @} */