VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h @ 105456

Last change on this file since 105456 was 105445, checked in by vboxsync, 6 months ago

VMM/IEM: Fold IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT() into IEM_MC_CALL_SSE_AIMPL_X()/IEM_MC_CALL_AVX_AIMPL_X(), bugref:10652

The current way of raising exceptions doesn't work, as IEM would raise an #XF/#UD whenever an exception is unmasked and the corresponding
exception status flag is set, even if the current instruction wouldn't generate that exception.
The Intel Architecture manual states that the exception status flags are sticky and need manual clearing through ldmxcsr/xrstor, but an exception
is only generated from an internal set of flags computed for the current operation. To avoid introducing temporary MXCSR values, which would
increase the overhead for native emitters later on, exception status calculation and raising is now done in the IEM_MC_CALL_SSE_AIMPL_X() and
IEM_MC_CALL_AVX_AIMPL_X() IEM microcode statements.
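
The distinction matters because the MXCSR status flags accumulate across instructions. The following is a minimal standalone sketch of the behaviour described above, using made-up constants and names rather than the actual IEM variables and helpers: the new status flags are merged into MXCSR (where they stay sticky), but #XF is only considered for the exception flags produced by the current operation.

    /* Illustrative sketch only -- constants and names are assumptions, not VBox's. */
    #include <stdint.h>
    #include <stdbool.h>

    #define MXCSR_XCPT_FLAGS      0x003fu /* IE/DE/ZE/OE/UE/PE status bits 0..5, sticky */
    #define MXCSR_XCPT_MASKS      0x1f80u /* corresponding mask bits 7..12 */
    #define MXCSR_XCPT_MASK_SHIFT 7

    /* Returns true if #XF (or #UD when CR4.OSXMMEXCPT is clear) should be raised
       for this operation; the new status flags are merged into MXCSR either way
       and stay set until cleared via ldmxcsr/fxrstor/xrstor. */
    static bool simdFpMergeAndCheckXcpt(uint32_t *puMxCsr, uint32_t fXcptThisOp)
    {
        *puMxCsr |= fXcptThisOp & MXCSR_XCPT_FLAGS;
        uint32_t const fUnmaskedNow = fXcptThisOp
                                    & ~((*puMxCsr & MXCSR_XCPT_MASKS) >> MXCSR_XCPT_MASK_SHIFT)
                                    & MXCSR_XCPT_FLAGS;
        return fUnmaskedNow != 0; /* pre-existing sticky bits alone never trigger */
    }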

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 514.6 KB
/* $Id: IEMAllInstTwoByte0f.cpp.h 105445 2024-07-23 12:17:44Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

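/* Editor's usage sketch (illustrative, not part of the original file): opcode
 * functions hand their assembly-level worker to the common body above, e.g.
 * an MMX shift. The concrete mnemonic/worker names here are assumptions.
 *
 *     FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
 *     }
 */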

/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 * that was introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * A body preprocessor variant of iemOpCommonSse2Opt_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE2_OPT_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLMEDIAOPTF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCRTUINT128U, pSrc, 1); \
            IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)

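/* Editor's usage sketch (illustrative, not part of the original file): an
 * opcode function instantiates the body with its instruction name, its worker
 * expression, and the architectures that have native emitters; RT_CONCAT3 in
 * the body then resolves to iemNativeEmit_<ins>_rr_u128 / _rv_u128. The
 * concrete mnemonic/worker names below are assumptions.
 *
 *     FNIEMOP_DEF(iemOp_pand_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *         SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128,
 *                                        RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64,
 *                                        RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 *     }
 */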

/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         * make sure we apply segmentation and alignment checks correctly.
         * When we have time, it would be interesting to explore what real
         * CPUs actually do and whether they will do a TLB load for the high
         * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         * make sure we apply segmentation and alignment checks correctly.
         * When we have time, it would be interesting to explore what real
         * CPUs actually do and whether they will do a TLB load for the high
         * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         * make sure we apply segmentation and alignment checks correctly.
         * When we have time, it would be interesting to explore what real
         * CPUs actually do and whether they will do a TLB load for the lower
         * part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         * make sure we apply segmentation and alignment checks correctly.
         * When we have time, it would be interesting to explore what real
         * CPUs actually do and whether they will do a TLB load for the lower
         * part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps grp6
 * @opcode /4
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps grp6
 * @opcode /5
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 * OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 * OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     * opcode sequence when F3 or F2 is used as prefix. So, the assumption
     * here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
1472 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
1473}
1474
1475/** Opcode 0x0f 0x01 0xda. */
1476#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1477FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1478{
1479 IEMOP_MNEMONIC(vmload, "vmload");
1480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1481 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
1482}
1483#else
1484FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1485#endif
1486
1487
1488/** Opcode 0x0f 0x01 0xdb. */
1489#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1490FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
1491{
1492 IEMOP_MNEMONIC(vmsave, "vmsave");
1493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1494 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
1495}
1496#else
1497FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1498#endif
1499
1500
1501/** Opcode 0x0f 0x01 0xdc. */
1502#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1503FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
1504{
1505 IEMOP_MNEMONIC(stgi, "stgi");
1506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1507 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
1508}
1509#else
1510FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1511#endif
1512
1513
1514/** Opcode 0x0f 0x01 0xdd. */
1515#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1516FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
1517{
1518 IEMOP_MNEMONIC(clgi, "clgi");
1519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1520 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
1521}
1522#else
1523FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1524#endif
1525
1526
1527/** Opcode 0x0f 0x01 0xdf. */
1528#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1529FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
1530{
1531 IEMOP_MNEMONIC(invlpga, "invlpga");
1532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1533 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
1534}
1535#else
1536FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1537#endif
1538
1539
1540/** Opcode 0x0f 0x01 0xde. */
1541#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1542FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
1543{
1544 IEMOP_MNEMONIC(skinit, "skinit");
1545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1546 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
1547}
1548#else
1549FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1550#endif
1551
1552
1553/** Opcode 0x0f 0x01 /4. */
1554FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1555{
1556 IEMOP_MNEMONIC(smsw, "smsw");
1557 IEMOP_HLP_MIN_286();
1558 if (IEM_IS_MODRM_REG_MODE(bRm))
1559 {
1560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1561 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
1562 iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1563 }
1564
1565 /* Ignore operand size here, memory refs are always 16-bit. */
1566 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1567 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1570 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1571 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1572 IEM_MC_END();
1573}
1574
1575
1576/** Opcode 0x0f 0x01 /6. */
1577FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1578{
1579 /* The operand size is effectively ignored, all is 16-bit and only the
1580 lower 3-bits are used. */
1581 IEMOP_MNEMONIC(lmsw, "lmsw");
1582 IEMOP_HLP_MIN_286();
1583 if (IEM_IS_MODRM_REG_MODE(bRm))
1584 {
1585 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1587 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1588 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1589 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1590 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1591 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1592 IEM_MC_END();
1593 }
1594 else
1595 {
1596 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1597 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1598 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1601 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1602 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1603 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1604 IEM_MC_END();
1605 }
1606}
1607
1608
1609/** Opcode 0x0f 0x01 /7. */
1610FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1611{
1612 IEMOP_MNEMONIC(invlpg, "invlpg");
1613 IEMOP_HLP_MIN_486();
1614 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1615 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1618 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
1619 IEM_MC_END();
1620}
1621
1622
1623/** Opcode 0x0f 0x01 0xf8. */
1624FNIEMOP_DEF(iemOp_Grp7_swapgs)
1625{
1626 IEMOP_MNEMONIC(swapgs, "swapgs");
1627 IEMOP_HLP_ONLY_64BIT();
1628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1629 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1630}
1631
1632
1633/** Opcode 0x0f 0x01 0xf9. */
1634FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1635{
1636 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1638 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1639 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1640 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1641 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1642 iemCImpl_rdtscp);
1643}
1644
1645
1646/**
1647 * Group 7 jump table, memory variant.
1648 */
1649IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1650{
1651 iemOp_Grp7_sgdt,
1652 iemOp_Grp7_sidt,
1653 iemOp_Grp7_lgdt,
1654 iemOp_Grp7_lidt,
1655 iemOp_Grp7_smsw,
1656 iemOp_InvalidWithRM,
1657 iemOp_Grp7_lmsw,
1658 iemOp_Grp7_invlpg
1659};
1660
1661
1662/** Opcode 0x0f 0x01. */
1663FNIEMOP_DEF(iemOp_Grp7)
1664{
1665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1666 if (IEM_IS_MODRM_MEM_MODE(bRm))
1667 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1668
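 /* mod=3: the rm field selects an individual instruction (vmcall, monitor,
    xgetbv, vmrun, swapgs, etc.) rather than a register operand. */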
1669 switch (IEM_GET_MODRM_REG_8(bRm))
1670 {
1671 case 0:
1672 switch (IEM_GET_MODRM_RM_8(bRm))
1673 {
1674 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1675 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1676 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1677 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1678 }
1679 IEMOP_RAISE_INVALID_OPCODE_RET();
1680
1681 case 1:
1682 switch (IEM_GET_MODRM_RM_8(bRm))
1683 {
1684 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1685 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1686 }
1687 IEMOP_RAISE_INVALID_OPCODE_RET();
1688
1689 case 2:
1690 switch (IEM_GET_MODRM_RM_8(bRm))
1691 {
1692 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1693 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1694 }
1695 IEMOP_RAISE_INVALID_OPCODE_RET();
1696
1697 case 3:
1698 switch (IEM_GET_MODRM_RM_8(bRm))
1699 {
1700 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1701 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1702 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1703 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1704 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1705 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1706 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1707 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1709 }
1710
1711 case 4:
1712 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1713
1714 case 5:
1715 IEMOP_RAISE_INVALID_OPCODE_RET();
1716
1717 case 6:
1718 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1719
1720 case 7:
1721 switch (IEM_GET_MODRM_RM_8(bRm))
1722 {
1723 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1724 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1725 }
1726 IEMOP_RAISE_INVALID_OPCODE_RET();
1727
1728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1729 }
1730}
1731
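/**
 * Common worker for LAR and LSL on the form:
 *      lar/lsl Gv, Ew
 *
 * The source selector is always read as 16 bits; only the destination
 * register write differs between the operand size variants.
 */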
1732FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1733{
1734 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1736
1737 if (IEM_IS_MODRM_REG_MODE(bRm))
1738 {
1739 switch (pVCpu->iem.s.enmEffOpSize)
1740 {
1741 case IEMMODE_16BIT:
1742 IEM_MC_BEGIN(0, 0);
1743 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1744 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1745 IEM_MC_ARG(uint16_t, u16Sel, 1);
1746 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1747
1748 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1749 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1750 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1751 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1752
1753 IEM_MC_END();
1754 break;
1755
1756 case IEMMODE_32BIT:
1757 case IEMMODE_64BIT:
1758 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1759 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1760 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1761 IEM_MC_ARG(uint16_t, u16Sel, 1);
1762 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1763
1764 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1765 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1766 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1767 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1768
1769 IEM_MC_END();
1770 break;
1771
1772 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1773 }
1774 }
1775 else
1776 {
1777 switch (pVCpu->iem.s.enmEffOpSize)
1778 {
1779 case IEMMODE_16BIT:
1780 IEM_MC_BEGIN(0, 0);
1781 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1782 IEM_MC_ARG(uint16_t, u16Sel, 1);
1783 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1785
1786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1787 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1788
1789 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1790 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1791 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1792 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1793
1794 IEM_MC_END();
1795 break;
1796
1797 case IEMMODE_32BIT:
1798 case IEMMODE_64BIT:
1799 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1800 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1801 IEM_MC_ARG(uint16_t, u16Sel, 1);
1802 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1803 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1804
1805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1806 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1807/** @todo testcase: make sure it's a 16-bit read. */
1808
1809 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1810 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1811 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1812 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1813
1814 IEM_MC_END();
1815 break;
1816
1817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1818 }
1819 }
1820}
1821
1822
1823
1824/**
1825 * @opcode 0x02
1826 * @opflmodify zf
1827 */
1828FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1829{
1830 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1831 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1832}
1833
1834
1835/**
1836 * @opcode 0x03
1837 * @opflmodify zf
1838 */
1839FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1840{
1841 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1842 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1843}
1844
1845
1846/** Opcode 0x0f 0x05. */
1847FNIEMOP_DEF(iemOp_syscall)
1848{
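 /* On the 80286, opcode 0x0f 0x05 was LOADALL; on later CPUs it is SYSCALL. */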
1849 if (RT_LIKELY(pVCpu->iem.s.uTargetCpu != IEMTARGETCPU_286))
1850 {
1851 IEMOP_MNEMONIC(syscall, "syscall");
1852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1853 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1854 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0, iemCImpl_syscall);
1855 }
1856 else
1857 {
1858 IEMOP_MNEMONIC(loadall286, "loadall286");
1859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1860 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1861 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1862 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_loadall286);
1863 }
1864}
1865
1866
1867/** Opcode 0x0f 0x06. */
1868FNIEMOP_DEF(iemOp_clts)
1869{
1870 IEMOP_MNEMONIC(clts, "clts");
1871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1872 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1873}
1874
1875
1876/** Opcode 0x0f 0x07. */
1877FNIEMOP_DEF(iemOp_sysret)
1878{
1879 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1881 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1882 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1883 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1884}
1885
1886
1887/** Opcode 0x0f 0x08. */
1888FNIEMOP_DEF(iemOp_invd)
1889{
1890 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1891 IEMOP_HLP_MIN_486();
1892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1893 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1894}
1895
1896
1897/** Opcode 0x0f 0x09. */
1898FNIEMOP_DEF(iemOp_wbinvd)
1899{
1900 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1901 IEMOP_HLP_MIN_486();
1902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1903 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1904}
1905
1906
1907/** Opcode 0x0f 0x0b. */
1908FNIEMOP_DEF(iemOp_ud2)
1909{
1910 IEMOP_MNEMONIC(ud2, "ud2");
1911 IEMOP_RAISE_INVALID_OPCODE_RET();
1912}
1913
1914/** Opcode 0x0f 0x0d. */
1915FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1916{
 /* AMD prefetch group; Intel implements this as NOP Ev (and so do we). */
1918 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1919 {
1920 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1921 IEMOP_RAISE_INVALID_OPCODE_RET();
1922 }
1923
1924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1925 if (IEM_IS_MODRM_REG_MODE(bRm))
1926 {
1927 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1928 IEMOP_RAISE_INVALID_OPCODE_RET();
1929 }
1930
1931 switch (IEM_GET_MODRM_REG_8(bRm))
1932 {
1933 case 2: /* Aliased to /0 for the time being. */
1934 case 4: /* Aliased to /0 for the time being. */
1935 case 5: /* Aliased to /0 for the time being. */
1936 case 6: /* Aliased to /0 for the time being. */
1937 case 7: /* Aliased to /0 for the time being. */
1938 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1939 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1940 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1942 }
1943
1944 IEM_MC_BEGIN(0, 0);
1945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1948 /* Currently a NOP. */
1949 IEM_MC_NOREF(GCPtrEffSrc);
1950 IEM_MC_ADVANCE_RIP_AND_FINISH();
1951 IEM_MC_END();
1952}
1953
1954
1955/** Opcode 0x0f 0x0e. */
1956FNIEMOP_DEF(iemOp_femms)
1957{
1958 IEMOP_MNEMONIC(femms, "femms");
1959
1960 IEM_MC_BEGIN(0, 0);
1961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1962 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
1963 IEM_MC_MAYBE_RAISE_FPU_XCPT();
1964 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
1965 IEM_MC_FPU_FROM_MMX_MODE();
1966 IEM_MC_ADVANCE_RIP_AND_FINISH();
1967 IEM_MC_END();
1968}
1969
1970
1971/** Opcode 0x0f 0x0f. */
1972FNIEMOP_DEF(iemOp_3Dnow)
1973{
1974 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1975 {
1976 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1977 IEMOP_RAISE_INVALID_OPCODE_RET();
1978 }
1979
1980#ifdef IEM_WITH_3DNOW
1981 /* This is pretty sparse, use switch instead of table. */
1982 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1983 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
1984#else
1985 IEMOP_BITCH_ABOUT_STUB();
1986 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1987#endif
1988}
1989
1990
1991/**
1992 * @opcode 0x10
1993 * @oppfx none
1994 * @opcpuid sse
1995 * @opgroup og_sse_simdfp_datamove
1996 * @opxcpttype 4UA
1997 * @optest op1=1 op2=2 -> op1=2
1998 * @optest op1=0 op2=-22 -> op1=-22
1999 */
2000FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2001{
2002 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2004 if (IEM_IS_MODRM_REG_MODE(bRm))
2005 {
2006 /*
2007 * XMM128, XMM128.
2008 */
2009 IEM_MC_BEGIN(0, 0);
2010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2012 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2013 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2014 IEM_GET_MODRM_RM(pVCpu, bRm));
2015 IEM_MC_ADVANCE_RIP_AND_FINISH();
2016 IEM_MC_END();
2017 }
2018 else
2019 {
2020 /*
2021 * XMM128, [mem128].
2022 */
2023 IEM_MC_BEGIN(0, 0);
2024 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2026
2027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2031
2032 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2033 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2034
2035 IEM_MC_ADVANCE_RIP_AND_FINISH();
2036 IEM_MC_END();
2037 }
}
2040
2041
2042/**
2043 * @opcode 0x10
2044 * @oppfx 0x66
2045 * @opcpuid sse2
2046 * @opgroup og_sse2_pcksclr_datamove
2047 * @opxcpttype 4UA
2048 * @optest op1=1 op2=2 -> op1=2
2049 * @optest op1=0 op2=-42 -> op1=-42
2050 */
2051FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2052{
2053 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2055 if (IEM_IS_MODRM_REG_MODE(bRm))
2056 {
2057 /*
2058 * XMM128, XMM128.
2059 */
2060 IEM_MC_BEGIN(0, 0);
2061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2062 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2063 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2064 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2065 IEM_GET_MODRM_RM(pVCpu, bRm));
2066 IEM_MC_ADVANCE_RIP_AND_FINISH();
2067 IEM_MC_END();
2068 }
2069 else
2070 {
2071 /*
2072 * XMM128, [mem128].
2073 */
2074 IEM_MC_BEGIN(0, 0);
2075 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077
2078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2080 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2081 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2082
2083 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2084 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2085
2086 IEM_MC_ADVANCE_RIP_AND_FINISH();
2087 IEM_MC_END();
2088 }
2089}
2090
2091
2092/**
2093 * @opcode 0x10
2094 * @oppfx 0xf3
2095 * @opcpuid sse
2096 * @opgroup og_sse_simdfp_datamove
2097 * @opxcpttype 5
2098 * @optest op1=1 op2=2 -> op1=2
2099 * @optest op1=0 op2=-22 -> op1=-22
2100 */
2101FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2102{
2103 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2105 if (IEM_IS_MODRM_REG_MODE(bRm))
2106 {
2107 /*
2108 * XMM32, XMM32.
2109 */
2110 IEM_MC_BEGIN(0, 0);
2111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2112 IEM_MC_LOCAL(uint32_t, uSrc);
2113
2114 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2115 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
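 /* Register form: only the low dword is merged; bits 127:32 of the destination
    are preserved (the memory form below zero-extends instead). */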
 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2117 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2118
2119 IEM_MC_ADVANCE_RIP_AND_FINISH();
2120 IEM_MC_END();
2121 }
2122 else
2123 {
2124 /*
2125 * XMM128, [mem32].
2126 */
2127 IEM_MC_BEGIN(0, 0);
2128 IEM_MC_LOCAL(uint32_t, uSrc);
2129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2130
2131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2135
2136 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2137 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2138
2139 IEM_MC_ADVANCE_RIP_AND_FINISH();
2140 IEM_MC_END();
2141 }
2142}
2143
2144
2145/**
2146 * @opcode 0x10
2147 * @oppfx 0xf2
2148 * @opcpuid sse2
2149 * @opgroup og_sse2_pcksclr_datamove
2150 * @opxcpttype 5
2151 * @optest op1=1 op2=2 -> op1=2
2152 * @optest op1=0 op2=-42 -> op1=-42
2153 */
2154FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2155{
2156 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2158 if (IEM_IS_MODRM_REG_MODE(bRm))
2159 {
2160 /*
2161 * XMM64, XMM64.
2162 */
2163 IEM_MC_BEGIN(0, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2165 IEM_MC_LOCAL(uint64_t, uSrc);
2166
2167 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2168 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2169 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2170 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2171
2172 IEM_MC_ADVANCE_RIP_AND_FINISH();
2173 IEM_MC_END();
2174 }
2175 else
2176 {
2177 /*
2178 * XMM128, [mem64].
2179 */
2180 IEM_MC_BEGIN(0, 0);
2181 IEM_MC_LOCAL(uint64_t, uSrc);
2182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2183
2184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2186 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2187 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2188
2189 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2190 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2191
2192 IEM_MC_ADVANCE_RIP_AND_FINISH();
2193 IEM_MC_END();
2194 }
2195}
2196
2197
2198/**
2199 * @opcode 0x11
2200 * @oppfx none
2201 * @opcpuid sse
2202 * @opgroup og_sse_simdfp_datamove
2203 * @opxcpttype 4UA
2204 * @optest op1=1 op2=2 -> op1=2
2205 * @optest op1=0 op2=-42 -> op1=-42
2206 */
2207FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2208{
2209 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2210 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2211 if (IEM_IS_MODRM_REG_MODE(bRm))
2212 {
2213 /*
2214 * XMM128, XMM128.
2215 */
2216 IEM_MC_BEGIN(0, 0);
2217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2218 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2219 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2220 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2221 IEM_GET_MODRM_REG(pVCpu, bRm));
2222 IEM_MC_ADVANCE_RIP_AND_FINISH();
2223 IEM_MC_END();
2224 }
2225 else
2226 {
2227 /*
2228 * [mem128], XMM128.
2229 */
2230 IEM_MC_BEGIN(0, 0);
2231 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2233
2234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2236 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2237 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2238
2239 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2240 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2241
2242 IEM_MC_ADVANCE_RIP_AND_FINISH();
2243 IEM_MC_END();
2244 }
2245}
2246
2247
2248/**
2249 * @opcode 0x11
2250 * @oppfx 0x66
2251 * @opcpuid sse2
2252 * @opgroup og_sse2_pcksclr_datamove
2253 * @opxcpttype 4UA
2254 * @optest op1=1 op2=2 -> op1=2
2255 * @optest op1=0 op2=-42 -> op1=-42
2256 */
2257FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2258{
2259 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2261 if (IEM_IS_MODRM_REG_MODE(bRm))
2262 {
2263 /*
2264 * XMM128, XMM128.
2265 */
2266 IEM_MC_BEGIN(0, 0);
2267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2268 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2269 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2270 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2271 IEM_GET_MODRM_REG(pVCpu, bRm));
2272 IEM_MC_ADVANCE_RIP_AND_FINISH();
2273 IEM_MC_END();
2274 }
2275 else
2276 {
2277 /*
2278 * [mem128], XMM128.
2279 */
2280 IEM_MC_BEGIN(0, 0);
2281 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2283
2284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2286 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2287 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2288
2289 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2290 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2291
2292 IEM_MC_ADVANCE_RIP_AND_FINISH();
2293 IEM_MC_END();
2294 }
2295}
2296
2297
2298/**
2299 * @opcode 0x11
2300 * @oppfx 0xf3
2301 * @opcpuid sse
2302 * @opgroup og_sse_simdfp_datamove
2303 * @opxcpttype 5
2304 * @optest op1=1 op2=2 -> op1=2
2305 * @optest op1=0 op2=-22 -> op1=-22
2306 */
2307FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2308{
2309 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2311 if (IEM_IS_MODRM_REG_MODE(bRm))
2312 {
2313 /*
2314 * XMM32, XMM32.
2315 */
2316 IEM_MC_BEGIN(0, 0);
2317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2318 IEM_MC_LOCAL(uint32_t, uSrc);
2319
2320 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2321 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2322 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2323 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2324
2325 IEM_MC_ADVANCE_RIP_AND_FINISH();
2326 IEM_MC_END();
2327 }
2328 else
2329 {
2330 /*
2331 * [mem32], XMM32.
2332 */
2333 IEM_MC_BEGIN(0, 0);
2334 IEM_MC_LOCAL(uint32_t, uSrc);
2335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2336
2337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2340 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2341
2342 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2343 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2344
2345 IEM_MC_ADVANCE_RIP_AND_FINISH();
2346 IEM_MC_END();
2347 }
2348}
2349
2350
2351/**
2352 * @opcode 0x11
2353 * @oppfx 0xf2
2354 * @opcpuid sse2
2355 * @opgroup og_sse2_pcksclr_datamove
2356 * @opxcpttype 5
2357 * @optest op1=1 op2=2 -> op1=2
2358 * @optest op1=0 op2=-42 -> op1=-42
2359 */
2360FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2361{
2362 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2364 if (IEM_IS_MODRM_REG_MODE(bRm))
2365 {
2366 /*
2367 * XMM64, XMM64.
2368 */
2369 IEM_MC_BEGIN(0, 0);
2370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2371 IEM_MC_LOCAL(uint64_t, uSrc);
2372
2373 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2374 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2375 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2376 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2377
2378 IEM_MC_ADVANCE_RIP_AND_FINISH();
2379 IEM_MC_END();
2380 }
2381 else
2382 {
2383 /*
2384 * [mem64], XMM64.
2385 */
2386 IEM_MC_BEGIN(0, 0);
2387 IEM_MC_LOCAL(uint64_t, uSrc);
2388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2389
2390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2392 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2393 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2394
2395 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2396 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2397
2398 IEM_MC_ADVANCE_RIP_AND_FINISH();
2399 IEM_MC_END();
2400 }
2401}
2402
2403
2404FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2405{
2406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2407 if (IEM_IS_MODRM_REG_MODE(bRm))
2408 {
2409 /**
2410 * @opcode 0x12
2411 * @opcodesub 11 mr/reg
2412 * @oppfx none
2413 * @opcpuid sse
2414 * @opgroup og_sse_simdfp_datamove
2415 * @opxcpttype 5
2416 * @optest op1=1 op2=2 -> op1=2
2417 * @optest op1=0 op2=-42 -> op1=-42
2418 */
2419 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2420
2421 IEM_MC_BEGIN(0, 0);
2422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2423 IEM_MC_LOCAL(uint64_t, uSrc);
2424
2425 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2426 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
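 /* MOVHLPS: the high qword of the source goes into the low qword of the destination. */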
2427 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2428 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2429
2430 IEM_MC_ADVANCE_RIP_AND_FINISH();
2431 IEM_MC_END();
2432 }
2433 else
2434 {
2435 /**
2436 * @opdone
2437 * @opcode 0x12
2438 * @opcodesub !11 mr/reg
2439 * @oppfx none
2440 * @opcpuid sse
2441 * @opgroup og_sse_simdfp_datamove
2442 * @opxcpttype 5
2443 * @optest op1=1 op2=2 -> op1=2
2444 * @optest op1=0 op2=-42 -> op1=-42
 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2446 */
2447 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2448
2449 IEM_MC_BEGIN(0, 0);
2450 IEM_MC_LOCAL(uint64_t, uSrc);
2451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2452
2453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2455 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2456 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2457
2458 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2459 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2460
2461 IEM_MC_ADVANCE_RIP_AND_FINISH();
2462 IEM_MC_END();
2463 }
2464}
2465
2466
2467/**
2468 * @opcode 0x12
2469 * @opcodesub !11 mr/reg
2470 * @oppfx 0x66
2471 * @opcpuid sse2
2472 * @opgroup og_sse2_pcksclr_datamove
2473 * @opxcpttype 5
2474 * @optest op1=1 op2=2 -> op1=2
2475 * @optest op1=0 op2=-42 -> op1=-42
2476 */
2477FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2478{
2479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2480 if (IEM_IS_MODRM_MEM_MODE(bRm))
2481 {
2482 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2483
2484 IEM_MC_BEGIN(0, 0);
2485 IEM_MC_LOCAL(uint64_t, uSrc);
2486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2487
2488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2490 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2491 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2492
2493 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2494 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2495
2496 IEM_MC_ADVANCE_RIP_AND_FINISH();
2497 IEM_MC_END();
2498 }
2499
2500 /**
2501 * @opdone
2502 * @opmnemonic ud660f12m3
2503 * @opcode 0x12
2504 * @opcodesub 11 mr/reg
2505 * @oppfx 0x66
2506 * @opunused immediate
2507 * @opcpuid sse
2508 * @optest ->
2509 */
2510 else
2511 IEMOP_RAISE_INVALID_OPCODE_RET();
2512}
2513
2514
2515/**
2516 * @opcode 0x12
2517 * @oppfx 0xf3
2518 * @opcpuid sse3
2519 * @opgroup og_sse3_pcksclr_datamove
2520 * @opxcpttype 4
2521 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2522 * op1=0x00000002000000020000000100000001
2523 */
2524FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2525{
2526 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2528 if (IEM_IS_MODRM_REG_MODE(bRm))
2529 {
2530 /*
2531 * XMM, XMM.
2532 */
2533 IEM_MC_BEGIN(0, 0);
2534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2535 IEM_MC_LOCAL(RTUINT128U, uSrc);
2536
2537 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2538 IEM_MC_PREPARE_SSE_USAGE();
2539
2540 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
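 /* Duplicate the even dwords: dst[0]=dst[1]=src[0], dst[2]=dst[3]=src[2]. */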
2541 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2542 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2543 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2544 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2545
2546 IEM_MC_ADVANCE_RIP_AND_FINISH();
2547 IEM_MC_END();
2548 }
2549 else
2550 {
2551 /*
2552 * XMM, [mem128].
2553 */
2554 IEM_MC_BEGIN(0, 0);
2555 IEM_MC_LOCAL(RTUINT128U, uSrc);
2556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2557
2558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2560 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2561 IEM_MC_PREPARE_SSE_USAGE();
2562
2563 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2564 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2565 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2566 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2567 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2568
2569 IEM_MC_ADVANCE_RIP_AND_FINISH();
2570 IEM_MC_END();
2571 }
2572}
2573
2574
2575/**
2576 * @opcode 0x12
2577 * @oppfx 0xf2
2578 * @opcpuid sse3
2579 * @opgroup og_sse3_pcksclr_datamove
2580 * @opxcpttype 5
2581 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2582 * op1=0x22222222111111112222222211111111
2583 */
2584FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2585{
2586 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2588 if (IEM_IS_MODRM_REG_MODE(bRm))
2589 {
2590 /*
2591 * XMM128, XMM64.
2592 */
2593 IEM_MC_BEGIN(0, 0);
2594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2595 IEM_MC_LOCAL(uint64_t, uSrc);
2596
2597 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2598 IEM_MC_PREPARE_SSE_USAGE();
2599
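 /* Broadcast the low qword of the source to both qwords of the destination. */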
2600 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2601 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2602 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2603
2604 IEM_MC_ADVANCE_RIP_AND_FINISH();
2605 IEM_MC_END();
2606 }
2607 else
2608 {
2609 /*
2610 * XMM128, [mem64].
2611 */
2612 IEM_MC_BEGIN(0, 0);
2613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2614 IEM_MC_LOCAL(uint64_t, uSrc);
2615
2616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2618 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2619 IEM_MC_PREPARE_SSE_USAGE();
2620
2621 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2622 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2623 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2624
2625 IEM_MC_ADVANCE_RIP_AND_FINISH();
2626 IEM_MC_END();
2627 }
2628}
2629
2630
2631/**
2632 * @opcode 0x13
2633 * @opcodesub !11 mr/reg
2634 * @oppfx none
2635 * @opcpuid sse
2636 * @opgroup og_sse_simdfp_datamove
2637 * @opxcpttype 5
2638 * @optest op1=1 op2=2 -> op1=2
2639 * @optest op1=0 op2=-42 -> op1=-42
2640 */
2641FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2642{
2643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2644 if (IEM_IS_MODRM_MEM_MODE(bRm))
2645 {
2646 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2647
2648 IEM_MC_BEGIN(0, 0);
2649 IEM_MC_LOCAL(uint64_t, uSrc);
2650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2651
2652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2654 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2656
2657 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2658 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2659
2660 IEM_MC_ADVANCE_RIP_AND_FINISH();
2661 IEM_MC_END();
2662 }
2663
2664 /**
2665 * @opdone
2666 * @opmnemonic ud0f13m3
2667 * @opcode 0x13
2668 * @opcodesub 11 mr/reg
2669 * @oppfx none
2670 * @opunused immediate
2671 * @opcpuid sse
2672 * @optest ->
2673 */
2674 else
2675 IEMOP_RAISE_INVALID_OPCODE_RET();
2676}
2677
2678
2679/**
2680 * @opcode 0x13
2681 * @opcodesub !11 mr/reg
2682 * @oppfx 0x66
2683 * @opcpuid sse2
2684 * @opgroup og_sse2_pcksclr_datamove
2685 * @opxcpttype 5
2686 * @optest op1=1 op2=2 -> op1=2
2687 * @optest op1=0 op2=-42 -> op1=-42
2688 */
2689FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2690{
2691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2692 if (IEM_IS_MODRM_MEM_MODE(bRm))
2693 {
2694 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2695
2696 IEM_MC_BEGIN(0, 0);
2697 IEM_MC_LOCAL(uint64_t, uSrc);
2698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2699
2700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2702 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2703 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2704
2705 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2706 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2707
2708 IEM_MC_ADVANCE_RIP_AND_FINISH();
2709 IEM_MC_END();
2710 }
2711
2712 /**
2713 * @opdone
2714 * @opmnemonic ud660f13m3
2715 * @opcode 0x13
2716 * @opcodesub 11 mr/reg
2717 * @oppfx 0x66
2718 * @opunused immediate
2719 * @opcpuid sse
2720 * @optest ->
2721 */
2722 else
2723 IEMOP_RAISE_INVALID_OPCODE_RET();
2724}
2725
2726
2727/**
2728 * @opmnemonic udf30f13
2729 * @opcode 0x13
2730 * @oppfx 0xf3
2731 * @opunused intel-modrm
2732 * @opcpuid sse
2733 * @optest ->
2734 * @opdone
2735 */
2736
2737/**
2738 * @opmnemonic udf20f13
2739 * @opcode 0x13
2740 * @oppfx 0xf2
2741 * @opunused intel-modrm
2742 * @opcpuid sse
2743 * @optest ->
2744 * @opdone
2745 */
2746
2747/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2748FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2749{
2750 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2751 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2752}
2753
2754
2755/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2756FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2757{
2758 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2759 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2760}
2761
2762
2763/**
2764 * @opdone
2765 * @opmnemonic udf30f14
2766 * @opcode 0x14
2767 * @oppfx 0xf3
2768 * @opunused intel-modrm
2769 * @opcpuid sse
2770 * @optest ->
2771 * @opdone
2772 */
2773
2774/**
2775 * @opmnemonic udf20f14
2776 * @opcode 0x14
2777 * @oppfx 0xf2
2778 * @opunused intel-modrm
2779 * @opcpuid sse
2780 * @optest ->
2781 * @opdone
2782 */
2783
2784/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2785FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2786{
2787 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2788 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2789}
2790
2791
2792/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2793FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2794{
2795 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2796 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2797}
2798
2799
2800/* Opcode 0xf3 0x0f 0x15 - invalid */
2801/* Opcode 0xf2 0x0f 0x15 - invalid */
2802
2803/**
2804 * @opdone
2805 * @opmnemonic udf30f15
2806 * @opcode 0x15
2807 * @oppfx 0xf3
2808 * @opunused intel-modrm
2809 * @opcpuid sse
2810 * @optest ->
2811 * @opdone
2812 */
2813
2814/**
2815 * @opmnemonic udf20f15
2816 * @opcode 0x15
2817 * @oppfx 0xf2
2818 * @opunused intel-modrm
2819 * @opcpuid sse
2820 * @optest ->
2821 * @opdone
2822 */
2823
2824FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2825{
2826 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2827 if (IEM_IS_MODRM_REG_MODE(bRm))
2828 {
2829 /**
2830 * @opcode 0x16
2831 * @opcodesub 11 mr/reg
2832 * @oppfx none
2833 * @opcpuid sse
2834 * @opgroup og_sse_simdfp_datamove
2835 * @opxcpttype 5
2836 * @optest op1=1 op2=2 -> op1=2
2837 * @optest op1=0 op2=-42 -> op1=-42
2838 */
2839 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2840
2841 IEM_MC_BEGIN(0, 0);
2842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2843 IEM_MC_LOCAL(uint64_t, uSrc);
2844
2845 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2846 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2847 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2848 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2849
2850 IEM_MC_ADVANCE_RIP_AND_FINISH();
2851 IEM_MC_END();
2852 }
2853 else
2854 {
2855 /**
2856 * @opdone
2857 * @opcode 0x16
2858 * @opcodesub !11 mr/reg
2859 * @oppfx none
2860 * @opcpuid sse
2861 * @opgroup og_sse_simdfp_datamove
2862 * @opxcpttype 5
2863 * @optest op1=1 op2=2 -> op1=2
2864 * @optest op1=0 op2=-42 -> op1=-42
2865 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2866 */
2867 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2868
2869 IEM_MC_BEGIN(0, 0);
2870 IEM_MC_LOCAL(uint64_t, uSrc);
2871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2872
2873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2875 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2876 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2877
2878 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2879 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2880
2881 IEM_MC_ADVANCE_RIP_AND_FINISH();
2882 IEM_MC_END();
2883 }
2884}
2885
2886
2887/**
2888 * @opcode 0x16
2889 * @opcodesub !11 mr/reg
2890 * @oppfx 0x66
2891 * @opcpuid sse2
2892 * @opgroup og_sse2_pcksclr_datamove
2893 * @opxcpttype 5
2894 * @optest op1=1 op2=2 -> op1=2
2895 * @optest op1=0 op2=-42 -> op1=-42
2896 */
2897FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2898{
2899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2900 if (IEM_IS_MODRM_MEM_MODE(bRm))
2901 {
2902 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2903
2904 IEM_MC_BEGIN(0, 0);
2905 IEM_MC_LOCAL(uint64_t, uSrc);
2906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2907
2908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2910 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2911 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2912
2913 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2914 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2915
2916 IEM_MC_ADVANCE_RIP_AND_FINISH();
2917 IEM_MC_END();
2918 }
2919
2920 /**
2921 * @opdone
2922 * @opmnemonic ud660f16m3
2923 * @opcode 0x16
2924 * @opcodesub 11 mr/reg
2925 * @oppfx 0x66
2926 * @opunused immediate
2927 * @opcpuid sse
2928 * @optest ->
2929 */
2930 else
2931 IEMOP_RAISE_INVALID_OPCODE_RET();
2932}
2933
2934
2935/**
2936 * @opcode 0x16
2937 * @oppfx 0xf3
2938 * @opcpuid sse3
2939 * @opgroup og_sse3_pcksclr_datamove
2940 * @opxcpttype 4
2941 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2942 * op1=0x00000002000000020000000100000001
2943 */
2944FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2945{
2946 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2948 if (IEM_IS_MODRM_REG_MODE(bRm))
2949 {
2950 /*
2951 * XMM128, XMM128.
2952 */
2953 IEM_MC_BEGIN(0, 0);
2954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2955 IEM_MC_LOCAL(RTUINT128U, uSrc);
2956
2957 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2958 IEM_MC_PREPARE_SSE_USAGE();
2959
2960 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
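 /* Duplicate the odd dwords: dst[0]=dst[1]=src[1], dst[2]=dst[3]=src[3]. */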
2961 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2962 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2963 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2964 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2965
2966 IEM_MC_ADVANCE_RIP_AND_FINISH();
2967 IEM_MC_END();
2968 }
2969 else
2970 {
2971 /*
2972 * XMM128, [mem128].
2973 */
2974 IEM_MC_BEGIN(0, 0);
2975 IEM_MC_LOCAL(RTUINT128U, uSrc);
2976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2977
2978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2980 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2981 IEM_MC_PREPARE_SSE_USAGE();
2982
2983 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2984 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2985 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2986 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2987 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2988
2989 IEM_MC_ADVANCE_RIP_AND_FINISH();
2990 IEM_MC_END();
2991 }
2992}
2993
2994/**
2995 * @opdone
 * @opmnemonic udf20f16
2997 * @opcode 0x16
2998 * @oppfx 0xf2
2999 * @opunused intel-modrm
3000 * @opcpuid sse
3001 * @optest ->
3002 * @opdone
3003 */
3004
3005
3006/**
3007 * @opcode 0x17
3008 * @opcodesub !11 mr/reg
3009 * @oppfx none
3010 * @opcpuid sse
3011 * @opgroup og_sse_simdfp_datamove
3012 * @opxcpttype 5
3013 * @optest op1=1 op2=2 -> op1=2
3014 * @optest op1=0 op2=-42 -> op1=-42
3015 */
3016FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3017{
3018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3019 if (IEM_IS_MODRM_MEM_MODE(bRm))
3020 {
3021 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3022
3023 IEM_MC_BEGIN(0, 0);
3024 IEM_MC_LOCAL(uint64_t, uSrc);
3025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3026
3027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3031
3032 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3033 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3034
3035 IEM_MC_ADVANCE_RIP_AND_FINISH();
3036 IEM_MC_END();
3037 }
3038
3039 /**
3040 * @opdone
3041 * @opmnemonic ud0f17m3
3042 * @opcode 0x17
3043 * @opcodesub 11 mr/reg
3044 * @oppfx none
3045 * @opunused immediate
3046 * @opcpuid sse
3047 * @optest ->
3048 */
3049 else
3050 IEMOP_RAISE_INVALID_OPCODE_RET();
3051}
3052
3053
3054/**
3055 * @opcode 0x17
3056 * @opcodesub !11 mr/reg
3057 * @oppfx 0x66
3058 * @opcpuid sse2
3059 * @opgroup og_sse2_pcksclr_datamove
3060 * @opxcpttype 5
3061 * @optest op1=1 op2=2 -> op1=2
3062 * @optest op1=0 op2=-42 -> op1=-42
3063 */
3064FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3065{
3066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3067 if (IEM_IS_MODRM_MEM_MODE(bRm))
3068 {
3069 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3070
3071 IEM_MC_BEGIN(0, 0);
3072 IEM_MC_LOCAL(uint64_t, uSrc);
3073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3074
3075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3077 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3078 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3079
3080 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3081 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3082
3083 IEM_MC_ADVANCE_RIP_AND_FINISH();
3084 IEM_MC_END();
3085 }
3086
3087 /**
3088 * @opdone
3089 * @opmnemonic ud660f17m3
3090 * @opcode 0x17
3091 * @opcodesub 11 mr/reg
3092 * @oppfx 0x66
3093 * @opunused immediate
3094 * @opcpuid sse
3095 * @optest ->
3096 */
3097 else
3098 IEMOP_RAISE_INVALID_OPCODE_RET();
3099}
3100
3101
3102/**
3103 * @opdone
3104 * @opmnemonic udf30f17
3105 * @opcode 0x17
3106 * @oppfx 0xf3
3107 * @opunused intel-modrm
3108 * @opcpuid sse
3109 * @optest ->
3110 * @opdone
3111 */
3112
3113/**
3114 * @opmnemonic udf20f17
3115 * @opcode 0x17
3116 * @oppfx 0xf2
3117 * @opunused intel-modrm
3118 * @opcpuid sse
3119 * @optest ->
3120 * @opdone
3121 */
3122
3123
3124/** Opcode 0x0f 0x18. */
3125FNIEMOP_DEF(iemOp_prefetch_Grp16)
3126{
3127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3128 if (IEM_IS_MODRM_MEM_MODE(bRm))
3129 {
3130 switch (IEM_GET_MODRM_REG_8(bRm))
3131 {
3132 case 4: /* Aliased to /0 for the time being according to AMD. */
3133 case 5: /* Aliased to /0 for the time being according to AMD. */
3134 case 6: /* Aliased to /0 for the time being according to AMD. */
3135 case 7: /* Aliased to /0 for the time being according to AMD. */
3136 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3137 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3138 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3139 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3140 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3141 }
3142
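 /* Prefetch hints have no architecturally visible effect and do not fault on
    the referenced address; decoding the operand and advancing RIP is all that
    is needed here. */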
3143 IEM_MC_BEGIN(0, 0);
3144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3147 /* Currently a NOP. */
3148 IEM_MC_NOREF(GCPtrEffSrc);
3149 IEM_MC_ADVANCE_RIP_AND_FINISH();
3150 IEM_MC_END();
3151 }
3152 else
3153 IEMOP_RAISE_INVALID_OPCODE_RET();
3154}
3155
3156
3157/** Opcode 0x0f 0x19..0x1f. */
3158FNIEMOP_DEF(iemOp_nop_Ev)
3159{
3160 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
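 /* 0x0f 0x1f /0 is the documented multi-byte NOP; 0x0f 0x19 thru 0x1e are
    historically reserved-NOP encodings (newer extensions reuse some of them,
    but they all decode as NOP here). */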
3161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3162 if (IEM_IS_MODRM_REG_MODE(bRm))
3163 {
3164 IEM_MC_BEGIN(0, 0);
3165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3166 IEM_MC_ADVANCE_RIP_AND_FINISH();
3167 IEM_MC_END();
3168 }
3169 else
3170 {
3171 IEM_MC_BEGIN(0, 0);
3172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3175 /* Currently a NOP. */
3176 IEM_MC_NOREF(GCPtrEffSrc);
3177 IEM_MC_ADVANCE_RIP_AND_FINISH();
3178 IEM_MC_END();
3179 }
3180}
3181
3182
3183/** Opcode 0x0f 0x20. */
3184FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3185{
 /* mod is ignored, as are operand size overrides. */
3187/** @todo testcase: check memory encoding. */
3188 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3189 IEMOP_HLP_MIN_386();
3190 if (IEM_IS_64BIT_CODE(pVCpu))
3191 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3192 else
3193 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3194
3195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3196 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3197 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3198 {
3199 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3200 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3201 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3202 iCrReg |= 8;
3203 }
3204 switch (iCrReg)
3205 {
3206 case 0: case 2: case 3: case 4: case 8:
3207 break;
3208 default:
3209 IEMOP_RAISE_INVALID_OPCODE_RET();
3210 }
3211 IEMOP_HLP_DONE_DECODING();
3212
3213 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3214 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3215 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3216}
3217
3218
3219/** Opcode 0x0f 0x21. */
3220FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3221{
3222/** @todo testcase: check memory encoding. */
3223 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3224 IEMOP_HLP_MIN_386();
3225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3227 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3228 IEMOP_RAISE_INVALID_OPCODE_RET();
3229 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3230 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3231 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3232}
3233
3234
3235/** Opcode 0x0f 0x22. */
3236FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3237{
 /* mod is ignored, as are operand size overrides. */
3239 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3240 IEMOP_HLP_MIN_386();
3241 if (IEM_IS_64BIT_CODE(pVCpu))
3242 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3243 else
3244 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3245
3246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3247 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3248 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3249 {
3250 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3251 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3252 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3253 iCrReg |= 8;
3254 }
3255 switch (iCrReg)
3256 {
3257 case 0: case 2: case 3: case 4: case 8:
3258 break;
3259 default:
3260 IEMOP_RAISE_INVALID_OPCODE_RET();
3261 }
3262 IEMOP_HLP_DONE_DECODING();
3263
3264 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
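 /* CR2 and CR8 writes never change the processor mode or paging setup, so
    neither a mode-change flag nor CR0/CR4 flushing is needed for them. */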
3265 if (iCrReg & (2 | 8))
3266 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3267 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3268 else
3269 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3270 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3271}
3272
3273
3274/** Opcode 0x0f 0x23. */
3275FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3276{
3277 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3278 IEMOP_HLP_MIN_386();
3279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3281 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3282 IEMOP_RAISE_INVALID_OPCODE_RET();
3283 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3284 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3285}
3286
3287
3288/** Opcode 0x0f 0x24. */
3289FNIEMOP_DEF(iemOp_mov_Rd_Td)
3290{
3291 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3292 IEMOP_HLP_MIN_386();
3293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3295 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3296 IEMOP_RAISE_INVALID_OPCODE_RET();
3297 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3298 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3299}
3300
3301
3302/** Opcode 0x0f 0x26. */
3303FNIEMOP_DEF(iemOp_mov_Td_Rd)
3304{
3305 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3306 IEMOP_HLP_MIN_386();
3307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3310 IEMOP_RAISE_INVALID_OPCODE_RET();
3311 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3312}
3313
3314
3315/**
3316 * @opcode 0x28
3317 * @oppfx none
3318 * @opcpuid sse
3319 * @opgroup og_sse_simdfp_datamove
3320 * @opxcpttype 1
3321 * @optest op1=1 op2=2 -> op1=2
3322 * @optest op1=0 op2=-42 -> op1=-42
3323 */
3324FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3325{
3326 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3328 if (IEM_IS_MODRM_REG_MODE(bRm))
3329 {
3330 /*
3331 * Register, register.
3332 */
3333 IEM_MC_BEGIN(0, 0);
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3335 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3336 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3337 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3338 IEM_GET_MODRM_RM(pVCpu, bRm));
3339 IEM_MC_ADVANCE_RIP_AND_FINISH();
3340 IEM_MC_END();
3341 }
3342 else
3343 {
3344 /*
3345 * Register, memory.
3346 */
3347 IEM_MC_BEGIN(0, 0);
3348 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3350
3351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3353 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3354 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3355
3356 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3357 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3358
3359 IEM_MC_ADVANCE_RIP_AND_FINISH();
3360 IEM_MC_END();
3361 }
3362}
3363
3364/**
3365 * @opcode 0x28
3366 * @oppfx 66
3367 * @opcpuid sse2
3368 * @opgroup og_sse2_pcksclr_datamove
3369 * @opxcpttype 1
3370 * @optest op1=1 op2=2 -> op1=2
3371 * @optest op1=0 op2=-42 -> op1=-42
3372 */
3373FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3374{
3375 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3377 if (IEM_IS_MODRM_REG_MODE(bRm))
3378 {
3379 /*
3380 * Register, register.
3381 */
3382 IEM_MC_BEGIN(0, 0);
3383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3384 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3385 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3386 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3387 IEM_GET_MODRM_RM(pVCpu, bRm));
3388 IEM_MC_ADVANCE_RIP_AND_FINISH();
3389 IEM_MC_END();
3390 }
3391 else
3392 {
3393 /*
3394 * Register, memory.
3395 */
3396 IEM_MC_BEGIN(0, 0);
3397 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3399
3400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3402 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3403 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3404
3405 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3406 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3407
3408 IEM_MC_ADVANCE_RIP_AND_FINISH();
3409 IEM_MC_END();
3410 }
3411}
3412
3413/* Opcode 0xf3 0x0f 0x28 - invalid */
3414/* Opcode 0xf2 0x0f 0x28 - invalid */
3415
3416/**
3417 * @opcode 0x29
3418 * @oppfx none
3419 * @opcpuid sse
3420 * @opgroup og_sse_simdfp_datamove
3421 * @opxcpttype 1
3422 * @optest op1=1 op2=2 -> op1=2
3423 * @optest op1=0 op2=-42 -> op1=-42
3424 */
3425FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3426{
3427 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3429 if (IEM_IS_MODRM_REG_MODE(bRm))
3430 {
3431 /*
3432 * Register, register.
3433 */
3434 IEM_MC_BEGIN(0, 0);
3435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3436 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3437 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3438 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3439 IEM_GET_MODRM_REG(pVCpu, bRm));
3440 IEM_MC_ADVANCE_RIP_AND_FINISH();
3441 IEM_MC_END();
3442 }
3443 else
3444 {
3445 /*
3446 * Memory, register.
3447 */
3448 IEM_MC_BEGIN(0, 0);
3449 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3451
3452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3454 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3455 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3456
3457 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3458 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3459
3460 IEM_MC_ADVANCE_RIP_AND_FINISH();
3461 IEM_MC_END();
3462 }
3463}
3464
3465/**
3466 * @opcode 0x29
3467 * @oppfx 66
3468 * @opcpuid sse2
3469 * @opgroup og_sse2_pcksclr_datamove
3470 * @opxcpttype 1
3471 * @optest op1=1 op2=2 -> op1=2
3472 * @optest op1=0 op2=-42 -> op1=-42
3473 */
3474FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3475{
3476 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3478 if (IEM_IS_MODRM_REG_MODE(bRm))
3479 {
3480 /*
3481 * Register, register.
3482 */
3483 IEM_MC_BEGIN(0, 0);
3484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3487 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3488 IEM_GET_MODRM_REG(pVCpu, bRm));
3489 IEM_MC_ADVANCE_RIP_AND_FINISH();
3490 IEM_MC_END();
3491 }
3492 else
3493 {
3494 /*
3495 * Memory, register.
3496 */
3497 IEM_MC_BEGIN(0, 0);
3498 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3500
3501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3503 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3504 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3505
3506 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3507 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3508
3509 IEM_MC_ADVANCE_RIP_AND_FINISH();
3510 IEM_MC_END();
3511 }
3512}
3513
3514/* Opcode 0xf3 0x0f 0x29 - invalid */
3515/* Opcode 0xf2 0x0f 0x29 - invalid */
3516
3517
3518/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3519FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3520{
3521 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3522 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3523 if (IEM_IS_MODRM_REG_MODE(bRm))
3524 {
3525 /*
3526 * XMM, MMX
3527 */
3528 IEM_MC_BEGIN(0, 0);
3529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3530 IEM_MC_LOCAL(X86XMMREG, Dst);
3531 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3532 IEM_MC_ARG(uint64_t, u64Src, 1);
3533 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3534 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3535 IEM_MC_PREPARE_FPU_USAGE();
3536 IEM_MC_FPU_TO_MMX_MODE();
3537
3538 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3539 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3540
3541 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3542 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3543
3544 IEM_MC_ADVANCE_RIP_AND_FINISH();
3545 IEM_MC_END();
3546 }
3547 else
3548 {
3549 /*
3550 * XMM, [mem64]
3551 */
3552 IEM_MC_BEGIN(0, 0);
3553 IEM_MC_LOCAL(X86XMMREG, Dst);
3554 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3555 IEM_MC_ARG(uint64_t, u64Src, 1);
3556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3557
3558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3560 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3561 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3562 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3563
3564 IEM_MC_PREPARE_FPU_USAGE();
3565 IEM_MC_FPU_TO_MMX_MODE();
3566
3567 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3568 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3569
3570 IEM_MC_ADVANCE_RIP_AND_FINISH();
3571 IEM_MC_END();
3572 }
3573}
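
/*
 * A sketch of what IEM_MC_FPU_TO_MMX_MODE above amounts to architecturally:
 * MMX-touching instructions zero FSW.TOP (bits 13:11) and mark all eight
 * x87 registers as valid in the abridged tag word.  The struct below is a
 * stand-in for illustration, not the real IEM or FXSAVE state layout.
 */
typedef struct IEMEXAMPLEX87STATE
{
    uint16_t FCW;   /* control word, untouched here */
    uint16_t FSW;   /* status word; TOP lives in bits 13:11 */
    uint8_t  FTW;   /* abridged tag word: bit n set = stN non-empty */
} IEMEXAMPLEX87STATE;

static void iemExampleFpuToMmxMode(IEMEXAMPLEX87STATE *pFpu)
{
    pFpu->FSW &= ~(uint16_t)(7u << 11);  /* TOP := 0 */
    pFpu->FTW  = 0xff;                   /* every register tagged valid */
}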
3574
3575
3576/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3577FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3578{
3579 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3581 if (IEM_IS_MODRM_REG_MODE(bRm))
3582 {
3583 /*
3584 * XMM, MMX
3585 */
3586 IEM_MC_BEGIN(0, 0);
3587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3588 IEM_MC_LOCAL(X86XMMREG, Dst);
3589 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3590 IEM_MC_ARG(uint64_t, u64Src, 1);
3591 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3592 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3593 IEM_MC_PREPARE_FPU_USAGE();
3594 IEM_MC_FPU_TO_MMX_MODE();
3595
3596 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3597
3598 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3599 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3600
3601 IEM_MC_ADVANCE_RIP_AND_FINISH();
3602 IEM_MC_END();
3603 }
3604 else
3605 {
3606 /*
3607 * XMM, [mem64]
3608 */
3609 IEM_MC_BEGIN(0, 0);
3610 IEM_MC_LOCAL(X86XMMREG, Dst);
3611 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3612 IEM_MC_ARG(uint64_t, u64Src, 1);
3613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3614
3615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3617 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3618 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3619 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3620
3621 /* Memory source, so per the SDM this form doesn't cause a transition to MMX mode. */
3622 IEM_MC_PREPARE_SSE_USAGE();
3623
3624 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3625 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3626
3627 IEM_MC_ADVANCE_RIP_AND_FINISH();
3628 IEM_MC_END();
3629 }
3630}
3631
3632
3633/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3634FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3635{
3636 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3637
3638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3639 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3640 {
3641 if (IEM_IS_MODRM_REG_MODE(bRm))
3642 {
3643 /* XMM, greg64 */
3644 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3645 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3646 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3647 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3648
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3650 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3651 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3652
3653 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3654 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3655 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3656
3657 IEM_MC_ADVANCE_RIP_AND_FINISH();
3658 IEM_MC_END();
3659 }
3660 else
3661 {
3662 /* XMM, [mem64] */
3663 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3665 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3666 IEM_MC_LOCAL(int64_t, i64Src);
3667 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3668 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3669
3670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3672 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3673 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3674
3675 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3676 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3677 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3678
3679 IEM_MC_ADVANCE_RIP_AND_FINISH();
3680 IEM_MC_END();
3681 }
3682 }
3683 else
3684 {
3685 if (IEM_IS_MODRM_REG_MODE(bRm))
3686 {
3687 /* greg, XMM */
3688 IEM_MC_BEGIN(0, 0);
3689 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3690 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3691 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3692
3693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3694 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3695 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3696
3697 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3698 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3699 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3700
3701 IEM_MC_ADVANCE_RIP_AND_FINISH();
3702 IEM_MC_END();
3703 }
3704 else
3705 {
3706 /* greg, [mem32] */
3707 IEM_MC_BEGIN(0, 0);
3708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3709 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3710 IEM_MC_LOCAL(int32_t, i32Src);
3711 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3712 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3713
3714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3716 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3717 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3718
3719 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3720 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3721 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3722
3723 IEM_MC_ADVANCE_RIP_AND_FINISH();
3724 IEM_MC_END();
3725 }
3726 }
3727}
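
/*
 * A plain-C sketch of the two source widths cvtsi2ss dispatches on above:
 * with REX.W the source is a signed 64-bit GPR, otherwise a signed 32-bit
 * one; either way the result is a single float32 rounded per MXCSR.RC.
 * The C casts below assume round-to-nearest (the power-up rounding mode);
 * the helper is illustrative, not the iemAImpl worker.
 */
static float iemExampleCvtSi2Ss(int64_t i64Src, bool fRexW)
{
    /* 64-bit sources with more than 24 significant bits must round, since
       float32 only has a 24-bit mantissa; 32-bit sources are narrowed to
       int32_t first, matching the non-REX.W path above. */
    return fRexW ? (float)i64Src : (float)(int32_t)i64Src;
}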
3728
3729
3730/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3731FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3732{
3733 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3734
3735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3736 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3737 {
3738 if (IEM_IS_MODRM_REG_MODE(bRm))
3739 {
3740 /* XMM, greg64 */
3741 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3742 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3743 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3744 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3745
3746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3747 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3748 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3749
3750 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3751 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3752 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3753
3754 IEM_MC_ADVANCE_RIP_AND_FINISH();
3755 IEM_MC_END();
3756 }
3757 else
3758 {
3759 /* XMM, [mem64] */
3760 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3762 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3763 IEM_MC_LOCAL(int64_t, i64Src);
3764 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3765 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3766
3767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3769 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3770 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3771
3772 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3773 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3774 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3775
3776 IEM_MC_ADVANCE_RIP_AND_FINISH();
3777 IEM_MC_END();
3778 }
3779 }
3780 else
3781 {
3782 if (IEM_IS_MODRM_REG_MODE(bRm))
3783 {
3784 /* XMM, greg32 */
3785 IEM_MC_BEGIN(0, 0);
3786 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3787 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3788 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3789
3790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3791 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3792 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3793
3794 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3795 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3796 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3797
3798 IEM_MC_ADVANCE_RIP_AND_FINISH();
3799 IEM_MC_END();
3800 }
3801 else
3802 {
3803 /* XMM, [mem32] */
3804 IEM_MC_BEGIN(0, 0);
3805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3806 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3807 IEM_MC_LOCAL(int32_t, i32Src);
3808 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3809 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3810
3811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3813 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3814 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3815
3816 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3817 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3818 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3819
3820 IEM_MC_ADVANCE_RIP_AND_FINISH();
3821 IEM_MC_END();
3822 }
3823 }
3824}
3825
3826
3827/**
3828 * @opcode 0x2b
3829 * @opcodesub !11 mr/reg
3830 * @oppfx none
3831 * @opcpuid sse
3832 * @opgroup og_sse1_cachect
3833 * @opxcpttype 1
3834 * @optest op1=1 op2=2 -> op1=2
3835 * @optest op1=0 op2=-42 -> op1=-42
3836 */
3837FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3838{
3839 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3840 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3841 if (IEM_IS_MODRM_MEM_MODE(bRm))
3842 {
3843 /*
3844 * memory, register.
3845 */
3846 IEM_MC_BEGIN(0, 0);
3847 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3849
3850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3852 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3853 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3854
3855 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3856 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3857
3858 IEM_MC_ADVANCE_RIP_AND_FINISH();
3859 IEM_MC_END();
3860 }
3861 /* The register, register encoding is invalid. */
3862 else
3863 IEMOP_RAISE_INVALID_OPCODE_RET();
3864}
3865
3866/**
3867 * @opcode 0x2b
3868 * @opcodesub !11 mr/reg
3869 * @oppfx 0x66
3870 * @opcpuid sse2
3871 * @opgroup og_sse2_cachect
3872 * @opxcpttype 1
3873 * @optest op1=1 op2=2 -> op1=2
3874 * @optest op1=0 op2=-42 -> op1=-42
3875 */
3876FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3877{
3878 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3880 if (IEM_IS_MODRM_MEM_MODE(bRm))
3881 {
3882 /*
3883 * memory, register.
3884 */
3885 IEM_MC_BEGIN(0, 0);
3886 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3887 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3888
3889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3891 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3892 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3893
3894 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3895 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3896
3897 IEM_MC_ADVANCE_RIP_AND_FINISH();
3898 IEM_MC_END();
3899 }
3900 /* The register, register encoding is invalid. */
3901 else
3902 IEMOP_RAISE_INVALID_OPCODE_RET();
3903}
3904/* Opcode 0xf3 0x0f 0x2b - invalid */
3905/* Opcode 0xf2 0x0f 0x2b - invalid */
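
/*
 * What the guest sees for movntps/movntpd above, as a standalone host-side
 * intrinsics sketch (not IEM microcode): a 16-byte aligned streaming store
 * that bypasses the cache hierarchy.  Misaligned addresses raise #GP, which
 * is why the memory paths go through IEM_MC_STORE_MEM_U128_ALIGN_SSE.
 * Assumes <immintrin.h> and an SSE-capable host compiler.
 */
#include <immintrin.h>

static void iemExampleMovNtPs(float *pDst /* must be 16-byte aligned */, __m128 uSrc)
{
    _mm_stream_ps(pDst, uSrc);  /* non-temporal hint */
    _mm_sfence();               /* drain the WC buffer before later stores */
}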
3906
3907
3908/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3909FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3910{
3911 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3913 if (IEM_IS_MODRM_REG_MODE(bRm))
3914 {
3915 /*
3916 * Register, register.
3917 */
3918 IEM_MC_BEGIN(0, 0);
3919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3920 IEM_MC_LOCAL(uint64_t, u64Dst);
3921 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3922 IEM_MC_ARG(uint64_t, u64Src, 1);
3923 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3924 IEM_MC_PREPARE_FPU_USAGE();
3925 IEM_MC_FPU_TO_MMX_MODE();
3926
3927 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3928
3929 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3930 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3931
3932 IEM_MC_ADVANCE_RIP_AND_FINISH();
3933 IEM_MC_END();
3934 }
3935 else
3936 {
3937 /*
3938 * Register, memory.
3939 */
3940 IEM_MC_BEGIN(0, 0);
3941 IEM_MC_LOCAL(uint64_t, u64Dst);
3942 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3943 IEM_MC_ARG(uint64_t, u64Src, 1);
3944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3945
3946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3948 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3949 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3950
3951 IEM_MC_PREPARE_FPU_USAGE();
3952 IEM_MC_FPU_TO_MMX_MODE();
3953
3954 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3955 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3956
3957 IEM_MC_ADVANCE_RIP_AND_FINISH();
3958 IEM_MC_END();
3959 }
3960}
3961
3962
3963/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3964FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
3965{
3966 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3968 if (IEM_IS_MODRM_REG_MODE(bRm))
3969 {
3970 /*
3971 * Register, register.
3972 */
3973 IEM_MC_BEGIN(0, 0);
3974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3975 IEM_MC_LOCAL(uint64_t, u64Dst);
3976 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3977 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
3978 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3979 IEM_MC_PREPARE_FPU_USAGE();
3980 IEM_MC_FPU_TO_MMX_MODE();
3981
3982 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3983
3984 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
3985 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3986
3987 IEM_MC_ADVANCE_RIP_AND_FINISH();
3988 IEM_MC_END();
3989 }
3990 else
3991 {
3992 /*
3993 * Register, memory.
3994 */
3995 IEM_MC_BEGIN(0, 0);
3996 IEM_MC_LOCAL(uint64_t, u64Dst);
3997 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3998 IEM_MC_LOCAL(X86XMMREG, uSrc);
3999 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4001
4002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4004 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4005 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4006
4007 IEM_MC_PREPARE_FPU_USAGE();
4008 IEM_MC_FPU_TO_MMX_MODE();
4009
4010 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4011 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4012
4013 IEM_MC_ADVANCE_RIP_AND_FINISH();
4014 IEM_MC_END();
4015 }
4016}
4017
4018
4019/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4020FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4021{
4022 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4023
4024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4025 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4026 {
4027 if (IEM_IS_MODRM_REG_MODE(bRm))
4028 {
4029 /* greg64, XMM */
4030 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4031 IEM_MC_LOCAL(int64_t, i64Dst);
4032 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4033 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4034
4035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4036 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4037 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4038
4039 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4040 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4041 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4042
4043 IEM_MC_ADVANCE_RIP_AND_FINISH();
4044 IEM_MC_END();
4045 }
4046 else
4047 {
4048 /* greg64, [mem64] */
4049 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4051 IEM_MC_LOCAL(int64_t, i64Dst);
4052 IEM_MC_LOCAL(uint32_t, u32Src);
4053 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4054 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4055
4056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4058 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4059 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4060
4061 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4062 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4063 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4064
4065 IEM_MC_ADVANCE_RIP_AND_FINISH();
4066 IEM_MC_END();
4067 }
4068 }
4069 else
4070 {
4071 if (IEM_IS_MODRM_REG_MODE(bRm))
4072 {
4073 /* greg, XMM */
4074 IEM_MC_BEGIN(0, 0);
4075 IEM_MC_LOCAL(int32_t, i32Dst);
4076 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4077 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4078
4079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4080 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4081 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4082
4083 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4084 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4085 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4086
4087 IEM_MC_ADVANCE_RIP_AND_FINISH();
4088 IEM_MC_END();
4089 }
4090 else
4091 {
4092 /* greg, [mem] */
4093 IEM_MC_BEGIN(0, 0);
4094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4095 IEM_MC_LOCAL(int32_t, i32Dst);
4096 IEM_MC_LOCAL(uint32_t, u32Src);
4097 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4098 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4099
4100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4103 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4104
4105 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4106 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4107 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4108
4109 IEM_MC_ADVANCE_RIP_AND_FINISH();
4110 IEM_MC_END();
4111 }
4112 }
4113}
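
/*
 * A sketch of the result selection the cvttss2si workers above implement:
 * truncation toward zero, with NaN and out-of-range inputs producing the
 * "integer indefinite" value (0x80000000, or 0x8000000000000000 for the
 * 64-bit form) and, on real hardware, setting MXCSR.IE (omitted here).
 * Illustrative helper, not the iemAImpl_cvttss2si_* signature.
 */
static int32_t iemExampleCvtTss2Si(float r32Src)
{
    if (   r32Src != r32Src            /* NaN compares unequal to itself */
        || r32Src >= 2147483648.0f     /* 2^31, first unrepresentable value */
        || r32Src <  -2147483648.0f)   /* below INT32_MIN */
        return INT32_MIN;              /* 0x80000000, integer indefinite */
    return (int32_t)r32Src;            /* C casts truncate toward zero */
}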
4114
4115
4116/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4117FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4118{
4119 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4120
4121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4122 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4123 {
4124 if (IEM_IS_MODRM_REG_MODE(bRm))
4125 {
4126 /* greg64, XMM */
4127 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4128 IEM_MC_LOCAL(int64_t, i64Dst);
4129 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4130 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4131
4132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4134 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4135
4136 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4137 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4138 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4139
4140 IEM_MC_ADVANCE_RIP_AND_FINISH();
4141 IEM_MC_END();
4142 }
4143 else
4144 {
4145 /* greg64, [mem64] */
4146 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4148 IEM_MC_LOCAL(int64_t, i64Dst);
4149 IEM_MC_LOCAL(uint64_t, u64Src);
4150 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4151 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4152
4153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4155 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4156 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4157
4158 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4159 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4160 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4161
4162 IEM_MC_ADVANCE_RIP_AND_FINISH();
4163 IEM_MC_END();
4164 }
4165 }
4166 else
4167 {
4168 if (IEM_IS_MODRM_REG_MODE(bRm))
4169 {
4170 /* greg, XMM */
4171 IEM_MC_BEGIN(0, 0);
4172 IEM_MC_LOCAL(int32_t, i32Dst);
4173 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4174 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4175
4176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4177 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4178 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4179
4180 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4181 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4182 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4183
4184 IEM_MC_ADVANCE_RIP_AND_FINISH();
4185 IEM_MC_END();
4186 }
4187 else
4188 {
4189 /* greg32, [mem32] */
4190 IEM_MC_BEGIN(0, 0);
4191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4192 IEM_MC_LOCAL(int32_t, i32Dst);
4193 IEM_MC_LOCAL(uint64_t, u64Src);
4194 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4195 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4196
4197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4199 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4200 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4201
4202 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4203 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4204 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4205
4206 IEM_MC_ADVANCE_RIP_AND_FINISH();
4207 IEM_MC_END();
4208 }
4209 }
4210}
4211
4212
4213/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4214FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4215{
4216 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4218 if (IEM_IS_MODRM_REG_MODE(bRm))
4219 {
4220 /*
4221 * Register, register.
4222 */
4223 IEM_MC_BEGIN(0, 0);
4224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4225 IEM_MC_LOCAL(uint64_t, u64Dst);
4226 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4227 IEM_MC_ARG(uint64_t, u64Src, 1);
4228
4229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4230 IEM_MC_PREPARE_FPU_USAGE();
4231 IEM_MC_FPU_TO_MMX_MODE();
4232
4233 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4234
4235 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4236 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4237
4238 IEM_MC_ADVANCE_RIP_AND_FINISH();
4239 IEM_MC_END();
4240 }
4241 else
4242 {
4243 /*
4244 * Register, memory.
4245 */
4246 IEM_MC_BEGIN(0, 0);
4247 IEM_MC_LOCAL(uint64_t, u64Dst);
4248 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4249 IEM_MC_ARG(uint64_t, u64Src, 1);
4250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4251
4252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4254 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4255 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4256
4257 IEM_MC_PREPARE_FPU_USAGE();
4258 IEM_MC_FPU_TO_MMX_MODE();
4259
4260 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4261 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4262
4263 IEM_MC_ADVANCE_RIP_AND_FINISH();
4264 IEM_MC_END();
4265 }
4266}
4267
4268
4269/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4270FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4271{
4272 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4274 if (IEM_IS_MODRM_REG_MODE(bRm))
4275 {
4276 /*
4277 * Register, register.
4278 */
4279 IEM_MC_BEGIN(0, 0);
4280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4281 IEM_MC_LOCAL(uint64_t, u64Dst);
4282 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4283 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4284
4285 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4286 IEM_MC_PREPARE_FPU_USAGE();
4287 IEM_MC_FPU_TO_MMX_MODE();
4288
4289 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4290
4291 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4292 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4293
4294 IEM_MC_ADVANCE_RIP_AND_FINISH();
4295 IEM_MC_END();
4296 }
4297 else
4298 {
4299 /*
4300 * Register, memory.
4301 */
4302 IEM_MC_BEGIN(0, 0);
4303 IEM_MC_LOCAL(uint64_t, u64Dst);
4304 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4305 IEM_MC_LOCAL(X86XMMREG, uSrc);
4306 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4308
4309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4311 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4312 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4313
4314 IEM_MC_PREPARE_FPU_USAGE();
4315 IEM_MC_FPU_TO_MMX_MODE();
4316
4317 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4318 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4319
4320 IEM_MC_ADVANCE_RIP_AND_FINISH();
4321 IEM_MC_END();
4322 }
4323}
4324
4325
4326/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4327FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4328{
4329 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4330
4331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4332 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4333 {
4334 if (IEM_IS_MODRM_REG_MODE(bRm))
4335 {
4336 /* greg64, XMM */
4337 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4338 IEM_MC_LOCAL(int64_t, i64Dst);
4339 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4340 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4341
4342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4343 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4344 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4345
4346 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4347 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4348 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4349
4350 IEM_MC_ADVANCE_RIP_AND_FINISH();
4351 IEM_MC_END();
4352 }
4353 else
4354 {
4355 /* greg64, [mem64] */
4356 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4358 IEM_MC_LOCAL(int64_t, i64Dst);
4359 IEM_MC_LOCAL(uint32_t, u32Src);
4360 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4361 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4362
4363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4365 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4366 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4367
4368 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4369 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4370 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4371
4372 IEM_MC_ADVANCE_RIP_AND_FINISH();
4373 IEM_MC_END();
4374 }
4375 }
4376 else
4377 {
4378 if (IEM_IS_MODRM_REG_MODE(bRm))
4379 {
4380 /* greg, XMM */
4381 IEM_MC_BEGIN(0, 0);
4382 IEM_MC_LOCAL(int32_t, i32Dst);
4383 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4384 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4385
4386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4387 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4388 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4389
4390 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4391 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4392 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4393
4394 IEM_MC_ADVANCE_RIP_AND_FINISH();
4395 IEM_MC_END();
4396 }
4397 else
4398 {
4399 /* greg, [mem] */
4400 IEM_MC_BEGIN(0, 0);
4401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4402 IEM_MC_LOCAL(int32_t, i32Dst);
4403 IEM_MC_LOCAL(uint32_t, u32Src);
4404 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4405 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4406
4407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4409 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4410 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4411
4412 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4413 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4414 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4415
4416 IEM_MC_ADVANCE_RIP_AND_FINISH();
4417 IEM_MC_END();
4418 }
4419 }
4420}
4421
4422
4423/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4424FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4425{
4426 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4427
4428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4429 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4430 {
4431 if (IEM_IS_MODRM_REG_MODE(bRm))
4432 {
4433 /* greg64, XMM */
4434 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4435 IEM_MC_LOCAL(int64_t, i64Dst);
4436 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4437 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4438
4439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4440 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4441 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4442
4443 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4444 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4445 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4446
4447 IEM_MC_ADVANCE_RIP_AND_FINISH();
4448 IEM_MC_END();
4449 }
4450 else
4451 {
4452 /* greg64, [mem64] */
4453 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4455 IEM_MC_LOCAL(int64_t, i64Dst);
4456 IEM_MC_LOCAL(uint64_t, u64Src);
4457 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4458 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4459
4460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4462 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4463 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4464
4465 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4466 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4467 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4468
4469 IEM_MC_ADVANCE_RIP_AND_FINISH();
4470 IEM_MC_END();
4471 }
4472 }
4473 else
4474 {
4475 if (IEM_IS_MODRM_REG_MODE(bRm))
4476 {
4477 /* greg32, XMM */
4478 IEM_MC_BEGIN(0, 0);
4479 IEM_MC_LOCAL(int32_t, i32Dst);
4480 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4481 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4482
4483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4484 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4485 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4486
4487 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4488 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4489 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4490
4491 IEM_MC_ADVANCE_RIP_AND_FINISH();
4492 IEM_MC_END();
4493 }
4494 else
4495 {
4496 /* greg32, [mem64] */
4497 IEM_MC_BEGIN(0, 0);
4498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4499 IEM_MC_LOCAL(int32_t, i32Dst);
4500 IEM_MC_LOCAL(uint64_t, u64Src);
4501 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4502 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4503
4504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4507 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4508
4509 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4510 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4511 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4512
4513 IEM_MC_ADVANCE_RIP_AND_FINISH();
4514 IEM_MC_END();
4515 }
4516 }
4517}
4518
4519
4520/**
4521 * @opcode 0x2e
4522 * @oppfx none
4523 * @opflmodify cf,pf,af,zf,sf,of
4524 * @opflclear af,sf,of
4525 */
4526FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4527{
4528 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4530 if (IEM_IS_MODRM_REG_MODE(bRm))
4531 {
4532 /*
4533 * Register, register.
4534 */
4535 IEM_MC_BEGIN(0, 0);
4536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4537 IEM_MC_LOCAL(uint32_t, fEFlags);
4538 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4539 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4540 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4542 IEM_MC_PREPARE_SSE_USAGE();
4543 IEM_MC_FETCH_EFLAGS(fEFlags);
4544 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4545 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4546 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4547 IEM_MC_COMMIT_EFLAGS(fEFlags);
4548
4549 IEM_MC_ADVANCE_RIP_AND_FINISH();
4550 IEM_MC_END();
4551 }
4552 else
4553 {
4554 /*
4555 * Register, memory.
4556 */
4557 IEM_MC_BEGIN(0, 0);
4558 IEM_MC_LOCAL(uint32_t, fEFlags);
4559 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4560 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4561 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4563
4564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4566 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4567 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4568
4569 IEM_MC_PREPARE_SSE_USAGE();
4570 IEM_MC_FETCH_EFLAGS(fEFlags);
4571 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4572 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4573 IEM_MC_COMMIT_EFLAGS(fEFlags);
4574
4575 IEM_MC_ADVANCE_RIP_AND_FINISH();
4576 IEM_MC_END();
4577 }
4578}
4579
4580
4581/**
4582 * @opcode 0x2e
4583 * @oppfx 0x66
4584 * @opflmodify cf,pf,af,zf,sf,of
4585 * @opflclear af,sf,of
4586 */
4587FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4588{
4589 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4591 if (IEM_IS_MODRM_REG_MODE(bRm))
4592 {
4593 /*
4594 * Register, register.
4595 */
4596 IEM_MC_BEGIN(0, 0);
4597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4598 IEM_MC_LOCAL(uint32_t, fEFlags);
4599 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4600 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4601 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4602 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4603 IEM_MC_PREPARE_SSE_USAGE();
4604 IEM_MC_FETCH_EFLAGS(fEFlags);
4605 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4606 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4607 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4608 IEM_MC_COMMIT_EFLAGS(fEFlags);
4609
4610 IEM_MC_ADVANCE_RIP_AND_FINISH();
4611 IEM_MC_END();
4612 }
4613 else
4614 {
4615 /*
4616 * Register, memory.
4617 */
4618 IEM_MC_BEGIN(0, 0);
4619 IEM_MC_LOCAL(uint32_t, fEFlags);
4620 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4621 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4622 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4624
4625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4627 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4628 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4629
4630 IEM_MC_PREPARE_SSE_USAGE();
4631 IEM_MC_FETCH_EFLAGS(fEFlags);
4632 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4633 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4634 IEM_MC_COMMIT_EFLAGS(fEFlags);
4635
4636 IEM_MC_ADVANCE_RIP_AND_FINISH();
4637 IEM_MC_END();
4638 }
4639}
4640
4641
4642/* Opcode 0xf3 0x0f 0x2e - invalid */
4643/* Opcode 0xf2 0x0f 0x2e - invalid */
4644
4645
4646/**
4647 * @opcode 0x2f
4648 * @oppfx none
4649 * @opflmodify cf,pf,af,zf,sf,of
4650 * @opflclear af,sf,of
4651 */
4652FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4653{
4654 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4656 if (IEM_IS_MODRM_REG_MODE(bRm))
4657 {
4658 /*
4659 * Register, register.
4660 */
4661 IEM_MC_BEGIN(0, 0);
4662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4663 IEM_MC_LOCAL(uint32_t, fEFlags);
4664 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4665 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4666 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4668 IEM_MC_PREPARE_SSE_USAGE();
4669 IEM_MC_FETCH_EFLAGS(fEFlags);
4670 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4671 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4672 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4673 IEM_MC_COMMIT_EFLAGS(fEFlags);
4674
4675 IEM_MC_ADVANCE_RIP_AND_FINISH();
4676 IEM_MC_END();
4677 }
4678 else
4679 {
4680 /*
4681 * Register, memory.
4682 */
4683 IEM_MC_BEGIN(0, 0);
4684 IEM_MC_LOCAL(uint32_t, fEFlags);
4685 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4686 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4687 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4689
4690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4692 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4693 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4694
4695 IEM_MC_PREPARE_SSE_USAGE();
4696 IEM_MC_FETCH_EFLAGS(fEFlags);
4697 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4698 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4699 IEM_MC_COMMIT_EFLAGS(fEFlags);
4700
4701 IEM_MC_ADVANCE_RIP_AND_FINISH();
4702 IEM_MC_END();
4703 }
4704}
4705
4706
4707/**
4708 * @opcode 0x2f
4709 * @oppfx 0x66
4710 * @opflmodify cf,pf,af,zf,sf,of
4711 * @opflclear af,sf,of
4712 */
4713FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4714{
4715 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4717 if (IEM_IS_MODRM_REG_MODE(bRm))
4718 {
4719 /*
4720 * Register, register.
4721 */
4722 IEM_MC_BEGIN(0, 0);
4723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4724 IEM_MC_LOCAL(uint32_t, fEFlags);
4725 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4726 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4727 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4728 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4729 IEM_MC_PREPARE_SSE_USAGE();
4730 IEM_MC_FETCH_EFLAGS(fEFlags);
4731 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4732 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4733 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4734 IEM_MC_COMMIT_EFLAGS(fEFlags);
4735
4736 IEM_MC_ADVANCE_RIP_AND_FINISH();
4737 IEM_MC_END();
4738 }
4739 else
4740 {
4741 /*
4742 * Register, memory.
4743 */
4744 IEM_MC_BEGIN(0, 0);
4745 IEM_MC_LOCAL(uint32_t, fEFlags);
4746 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4747 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4748 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4750
4751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4753 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4754 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4755
4756 IEM_MC_PREPARE_SSE_USAGE();
4757 IEM_MC_FETCH_EFLAGS(fEFlags);
4758 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4759 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4760 IEM_MC_COMMIT_EFLAGS(fEFlags);
4761
4762 IEM_MC_ADVANCE_RIP_AND_FINISH();
4763 IEM_MC_END();
4764 }
4765}
4766
4767
4768/* Opcode 0xf3 0x0f 0x2f - invalid */
4769/* Opcode 0xf2 0x0f 0x2f - invalid */
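
/*
 * The EFLAGS mapping shared by the four compares above, (u)comiss and
 * (u)comisd: unordered sets ZF=PF=CF=1, greater-than clears all three,
 * less-than sets only CF, equal sets only ZF, and OF/AF/SF are always
 * cleared (matching the @opflclear annotations).  The ordered variants
 * additionally signal #IA on QNaN operands, which the unordered ones do
 * not; that detail is omitted.  Illustrative sketch using doubles for
 * brevity, not the iemAImpl_*comis*_u128 signature.
 */
static uint32_t iemExampleComisEFlags(double rSrc1, double rSrc2, uint32_t fEFlags)
{
    fEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF);
    if (rSrc1 != rSrc1 || rSrc2 != rSrc2)   /* unordered (either operand NaN) */
        fEFlags |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
    else if (rSrc1 < rSrc2)                 /* less than */
        fEFlags |= X86_EFL_CF;
    else if (rSrc1 == rSrc2)                /* equal */
        fEFlags |= X86_EFL_ZF;
    /* greater than: ZF, PF and CF all stay clear */
    return fEFlags;
}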
4770
4771/** Opcode 0x0f 0x30. */
4772FNIEMOP_DEF(iemOp_wrmsr)
4773{
4774 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4776 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4777}
4778
4779
4780/** Opcode 0x0f 0x31. */
4781FNIEMOP_DEF(iemOp_rdtsc)
4782{
4783 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4785 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4786 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4787 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4788 iemCImpl_rdtsc);
4789}
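
/*
 * What the deferred iemCImpl_rdtsc produces from the guest's point of view:
 * the 64-bit TSC split across EDX:EAX, i.e. exactly the two registers named
 * in the RT_BIT_64() mask above (the upper halves of RAX/RDX are zeroed in
 * 64-bit mode).  GCC/Clang inline-assembly sketch for an x86 host,
 * illustrative only.
 */
static uint64_t iemExampleRdTsc(void)
{
    uint32_t uLo, uHi;
    __asm__ __volatile__("rdtsc" : "=a" (uLo), "=d" (uHi));
    return ((uint64_t)uHi << 32) | uLo;
}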
4790
4791
4792/** Opcode 0x0f 0x32. */
4793FNIEMOP_DEF(iemOp_rdmsr)
4794{
4795 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4797 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4798 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4799 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4800 iemCImpl_rdmsr);
4801}
4802
4803
4804/** Opcode 0x0f 0x33. */
4805FNIEMOP_DEF(iemOp_rdpmc)
4806{
4807 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4809 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4810 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4811 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4812 iemCImpl_rdpmc);
4813}
4814
4815
4816/** Opcode 0x0f 0x34. */
4817FNIEMOP_DEF(iemOp_sysenter)
4818{
4819 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4821 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4822 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4823 iemCImpl_sysenter);
4824}
4825
4826/** Opcode 0x0f 0x35. */
4827FNIEMOP_DEF(iemOp_sysexit)
4828{
4829 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4831 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4832 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4833 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4834}
4835
4836/** Opcode 0x0f 0x37. */
4837FNIEMOP_STUB(iemOp_getsec);
4838
4839
4840/** Opcode 0x0f 0x38. */
4841FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4842{
4843#ifdef IEM_WITH_THREE_0F_38
4844 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4845 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4846#else
4847 IEMOP_BITCH_ABOUT_STUB();
4848 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4849#endif
4850}
4851
4852
4853/** Opcode 0x0f 0x3a. */
4854FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4855{
4856#ifdef IEM_WITH_THREE_0F_3A
4857 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4858 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4859#else
4860 IEMOP_BITCH_ABOUT_STUB();
4861 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4862#endif
4863}
4864
4865
4866/**
4867 * Implements a conditional move.
4868 *
4869 * Wish there were an obvious way to do this where we could share and reduce
4870 * code bloat.
4871 *
4872 * @param a_Cnd The conditional "microcode" operation.
4873 */
4874#define CMOV_X(a_Cnd) \
4875 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4876 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4877 { \
4878 switch (pVCpu->iem.s.enmEffOpSize) \
4879 { \
4880 case IEMMODE_16BIT: \
4881 IEM_MC_BEGIN(0, 0); \
4882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4883 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4884 a_Cnd { \
4885 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4886 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4887 } IEM_MC_ENDIF(); \
4888 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4889 IEM_MC_END(); \
4890 break; \
4891 \
4892 case IEMMODE_32BIT: \
4893 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4895 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4896 a_Cnd { \
4897 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4898 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4899 } IEM_MC_ELSE() { \
4900 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4901 } IEM_MC_ENDIF(); \
4902 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4903 IEM_MC_END(); \
4904 break; \
4905 \
4906 case IEMMODE_64BIT: \
4907 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4909 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4910 a_Cnd { \
4911 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4912 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4913 } IEM_MC_ENDIF(); \
4914 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4915 IEM_MC_END(); \
4916 break; \
4917 \
4918 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4919 } \
4920 } \
4921 else \
4922 { \
4923 switch (pVCpu->iem.s.enmEffOpSize) \
4924 { \
4925 case IEMMODE_16BIT: \
4926 IEM_MC_BEGIN(0, 0); \
4927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4928 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4931 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4932 a_Cnd { \
4933 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4934 } IEM_MC_ENDIF(); \
4935 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4936 IEM_MC_END(); \
4937 break; \
4938 \
4939 case IEMMODE_32BIT: \
4940 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4942 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4945 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4946 a_Cnd { \
4947 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4948 } IEM_MC_ELSE() { \
4949 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4950 } IEM_MC_ENDIF(); \
4951 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4952 IEM_MC_END(); \
4953 break; \
4954 \
4955 case IEMMODE_64BIT: \
4956 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4958 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4961 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4962 a_Cnd { \
4963 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4964 } IEM_MC_ENDIF(); \
4965 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4966 IEM_MC_END(); \
4967 break; \
4968 \
4969 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4970 } \
4971 } do {} while (0)
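
/*
 * A plain-C model of the subtlety the 32-bit cases of CMOV_X encode: in
 * 64-bit mode a 32-bit GPR write zero-extends into bits 63:32, and CMOVcc
 * performs that write even when the condition is false, which is why the
 * IEM_MC_ELSE() branches above call IEM_MC_CLEAR_HIGH_GREG_U64().
 * Illustrative helper, not IEM microcode.
 */
static uint64_t iemExampleCmov32(uint64_t uDst64, uint32_t uSrc32, bool fCond)
{
    if (fCond)
        return uSrc32;            /* move + implicit zero-extension */
    return (uint32_t)uDst64;      /* no move, but bits 63:32 are still cleared */
}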
4972
4973
4974
4975/**
4976 * @opcode 0x40
4977 * @opfltest of
4978 */
4979FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
4980{
4981 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
4982 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
4983}
4984
4985
4986/**
4987 * @opcode 0x41
4988 * @opfltest of
4989 */
4990FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
4991{
4992 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
4993 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
4994}
4995
4996
4997/**
4998 * @opcode 0x42
4999 * @opfltest cf
5000 */
5001FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5002{
5003 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5004 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5005}
5006
5007
5008/**
5009 * @opcode 0x43
5010 * @opfltest cf
5011 */
5012FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5013{
5014 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5015 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5016}
5017
5018
5019/**
5020 * @opcode 0x44
5021 * @opfltest zf
5022 */
5023FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5024{
5025 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5026 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5027}
5028
5029
5030/**
5031 * @opcode 0x45
5032 * @opfltest zf
5033 */
5034FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5035{
5036 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5037 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5038}
5039
5040
5041/**
5042 * @opcode 0x46
5043 * @opfltest cf,zf
5044 */
5045FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5046{
5047 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5048 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5049}
5050
5051
5052/**
5053 * @opcode 0x47
5054 * @opfltest cf,zf
5055 */
5056FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5057{
5058 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5059 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5060}
5061
5062
5063/**
5064 * @opcode 0x48
5065 * @opfltest sf
5066 */
5067FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5068{
5069 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5070 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5071}
5072
5073
5074/**
5075 * @opcode 0x49
5076 * @opfltest sf
5077 */
5078FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5079{
5080 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5081 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5082}
5083
5084
5085/**
5086 * @opcode 0x4a
5087 * @opfltest pf
5088 */
5089FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5090{
5091 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5092 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5093}
5094
5095
5096/**
5097 * @opcode 0x4b
5098 * @opfltest pf
5099 */
5100FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5101{
5102 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5103 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5104}
5105
5106
5107/**
5108 * @opcode 0x4c
5109 * @opfltest sf,of
5110 */
5111FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5112{
5113 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5114 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5115}
5116
5117
5118/**
5119 * @opcode 0x4d
5120 * @opfltest sf,of
5121 */
5122FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5123{
5124 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5125 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5126}
5127
5128
5129/**
5130 * @opcode 0x4e
5131 * @opfltest zf,sf,of
5132 */
5133FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5134{
5135 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5136 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5137}
5138
5139
5140/**
5141 * @opcode 0x4f
5142 * @opfltest zf,sf,of
5143 */
5144FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5145{
5146 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5147 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5148}
5149
5150#undef CMOV_X
5151
5152/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5153FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5154{
5155 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5157 if (IEM_IS_MODRM_REG_MODE(bRm))
5158 {
5159 /*
5160 * Register, register.
5161 */
5162 IEM_MC_BEGIN(0, 0);
5163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5164 IEM_MC_LOCAL(uint8_t, u8Dst);
5165 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5166 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5167 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5168 IEM_MC_PREPARE_SSE_USAGE();
5169 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5170 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5171 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5172 IEM_MC_ADVANCE_RIP_AND_FINISH();
5173 IEM_MC_END();
5174 }
5175 /* No memory operand. */
5176 else
5177 IEMOP_RAISE_INVALID_OPCODE_RET();
5178}
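
/*
 * A sketch of what iemAImpl_movmskps_u128 computes: bit i of the result is
 * the sign bit of packed single i, and the 4-bit mask is zero-extended into
 * the 32-bit destination (per IEM_MC_STORE_GREG_U32 above).  The movmskpd
 * variant below collects the two double-precision sign bits instead.
 * Illustrative helper, not the real worker's signature.
 */
static uint8_t iemExampleMovMskPs(PCRTUINT128U puSrc)
{
    uint8_t u8Dst = 0;
    for (unsigned i = 0; i < 4; i++)
        u8Dst |= (uint8_t)(((puSrc->au32[i] >> 31) & 1) << i); /* sign bit i */
    return u8Dst;
}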
5179
5180
5181/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5182FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5183{
5184 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5186 if (IEM_IS_MODRM_REG_MODE(bRm))
5187 {
5188 /*
5189 * Register, register.
5190 */
5191 IEM_MC_BEGIN(0, 0);
5192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5193 IEM_MC_LOCAL(uint8_t, u8Dst);
5194 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5195 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5196 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5197 IEM_MC_PREPARE_SSE_USAGE();
5198 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5199 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5200 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5201 IEM_MC_ADVANCE_RIP_AND_FINISH();
5202 IEM_MC_END();
5203 }
5204 /* No memory operand. */
5205 else
5206 IEMOP_RAISE_INVALID_OPCODE_RET();
5207
5209}
5210
5211/* Opcode 0xf3 0x0f 0x50 - invalid */
5212/* Opcode 0xf2 0x0f 0x50 - invalid */
5213
5214
5215/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5216FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5217{
5218 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5219 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5220}
5221
5222
5223/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5224FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5225{
5226 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5227 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5228}
5229
5230
5231/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5232FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5233{
5234 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5235 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5236}
5237
5238
5239/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5240FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5241{
5242 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5243 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5244}
5245
5246
5247/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5248FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5249{
5250 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5251 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5252}
5253
5254
5255/* Opcode 0x66 0x0f 0x52 - invalid */
5256
5257
5258/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5259FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5260{
5261 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5262 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5263}
5264
5265
5266/* Opcode 0xf2 0x0f 0x52 - invalid */
5267
5268
5269/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5270FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5271{
5272 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5273 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5274}
5275
5276
5277/* Opcode 0x66 0x0f 0x53 - invalid */
5278
5279
5280/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5281FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5282{
5283 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5284 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5285}
5286
5287
5288/* Opcode 0xf2 0x0f 0x53 - invalid */
5289
5290
5291/** Opcode 0x0f 0x54 - andps Vps, Wps */
5292FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5293{
5294 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
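 /* Bitwise AND is value-agnostic, so the packed-single variant simply reuses
    the integer pand body (orps/xorps below do the same with por/pxor). */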
5295 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5296}
5297
5298
5299/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5300FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5301{
5302 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5303 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5304}
5305
5306
5307/* Opcode 0xf3 0x0f 0x54 - invalid */
5308/* Opcode 0xf2 0x0f 0x54 - invalid */
5309
5310
5311/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5312FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5313{
5314 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5315 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
5316}
5317
5318
5319/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5320FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5321{
5322 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5323 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
5324}
5325
5326
5327/* Opcode 0xf3 0x0f 0x55 - invalid */
5328/* Opcode 0xf2 0x0f 0x55 - invalid */
5329
5330
5331/** Opcode 0x0f 0x56 - orps Vps, Wps */
5332FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5333{
5334 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5335 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5336}
5337
5338
5339/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5340FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5341{
5342 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5343 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5344}
5345
5346
5347/* Opcode 0xf3 0x0f 0x56 - invalid */
5348/* Opcode 0xf2 0x0f 0x56 - invalid */
5349
5350
5351/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5352FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5353{
5354 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5355 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5356}
5357
5358
5359/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5360FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5361{
5362 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5363 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5364}
5365
5366
5367/* Opcode 0xf3 0x0f 0x57 - invalid */
5368/* Opcode 0xf2 0x0f 0x57 - invalid */
5369
5370/** Opcode 0x0f 0x58 - addps Vps, Wps */
5371FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5372{
5373 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5374 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5375}
5376
5377
5378/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5379FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5380{
5381 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5382 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5383}
5384
5385
5386/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5387FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5388{
5389 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5390 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5391}
5392
5393
5394/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5395FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5396{
5397 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5398 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5399}
5400
5401
5402/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5403FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5404{
5405 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5406 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5407}
5408
5409
5410/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5411FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5412{
5413 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5414 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5415}
5416
5417
5418/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5419FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5420{
5421 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5422 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5423}
5424
5425
5426/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5427FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5428{
5429 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5430 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5431}
5432
5433
5434/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5435FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5436{
5437 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd_WO, Wps, DISOPTYPE_HARMLESS, 0);
5438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5439 if (IEM_IS_MODRM_REG_MODE(bRm))
5440 {
5441 /*
5442 * XMM, XMM[63:0].
5443 */
5444 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5446 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5447 IEM_MC_PREPARE_SSE_USAGE();
5448
5449 IEM_MC_LOCAL(X86XMMREG, SseRes);
5450 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5451 IEM_MC_ARG(uint64_t const *, pu64Src, 1); /* The input is actually two 32-bit float values, */
5452 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); /* but we've got no matching type or MC. */
5453 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5454 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5455
5456 IEM_MC_ADVANCE_RIP_AND_FINISH();
5457 IEM_MC_END();
5458 }
5459 else
5460 {
5461 /*
5462 * XMM, [mem64].
5463 */
5464 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5468 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5469
5470 IEM_MC_LOCAL(uint64_t, u64Src);
5471 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pu64Src, u64Src, 1); /* (see comment above wrt type) */
5472 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5473
5474 IEM_MC_PREPARE_SSE_USAGE();
5475 IEM_MC_LOCAL(X86XMMREG, SseRes);
5476 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5477 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5478 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5479
5480 IEM_MC_ADVANCE_RIP_AND_FINISH();
5481 IEM_MC_END();
5482 }
5483}
5484
5485
5486/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5487FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5488{
5489 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps_WO, Wpd, DISOPTYPE_HARMLESS, 0);
5490 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5491 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5492}
5493
5494
5495/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5496FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5497{
5498 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5499 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5500}
5501
5502
5503/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5504FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5505{
5506 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5507 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5508}
5509
5510
5511/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5512FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5513{
5514 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5515 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5516 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5517}
5518
5519
5520/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5521FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5522{
5523 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5524 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5525 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5526}
5527
5528
5529/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5530FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5531{
5532 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5533 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5534 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5535}
5536
5537
5538/* Opcode 0xf2 0x0f 0x5b - invalid */
5539
5540
5541/** Opcode 0x0f 0x5c - subps Vps, Wps */
5542FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5543{
5544 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5545 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5546}
5547
5548
5549/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5550FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5551{
5552 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5553 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5554}
5555
5556
5557/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5558FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5559{
5560 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5561 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5562}
5563
5564
5565/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5566FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5567{
5568 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5569 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5570}
5571
5572
5573/** Opcode 0x0f 0x5d - minps Vps, Wps */
5574FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5575{
5576 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5577 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5578}
5579
5580
5581/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5582FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5583{
5584 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5585 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5586}
5587
5588
5589/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5590FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5591{
5592 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5593 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5594}
5595
5596
5597/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5598FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5599{
5600 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5601 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5602}
5603
5604
5605/** Opcode 0x0f 0x5e - divps Vps, Wps */
5606FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5607{
5608 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5609 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5610}
5611
5612
5613/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5614FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5615{
5616 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5617 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5618}
5619
5620
5621/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5622FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5623{
5624 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5625 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5626}
5627
5628
5629/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5630FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5631{
5632 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5633 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5634}
5635
5636
5637/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5638FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5639{
5640 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5641 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5642}
5643
5644
5645/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5646FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5647{
5648 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5649 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5650}
5651
5652
5653/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5654FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5655{
5656 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5657 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5658}
5659
5660
5661/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5662FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5663{
5664 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5665 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5666}
5667
5668
5669/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5670FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5671{
5672 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5673 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5674}
5675
5676
5677/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5678FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5679{
5680 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5681 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5682}
5683
5684
5685/* Opcode 0xf3 0x0f 0x60 - invalid */
5686
5687
5688/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5689FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5690{
5691 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
5692 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5693 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5694}
5695
5696
5697/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5698FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5699{
5700 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5701 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5702}
5703
5704
5705/* Opcode 0xf3 0x0f 0x61 - invalid */
5706
5707
5708/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5709FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5710{
5711 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5712 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5713}
5714
5715
5716/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5717FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5718{
5719 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5720 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5721}
5722
5723
5724/* Opcode 0xf3 0x0f 0x62 - invalid */
5725
5726
5727
5728/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5729FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5730{
5731 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5732 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5733}
5734
5735
5736/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5737FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5738{
5739 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5740 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5741}
5742
5743
5744/* Opcode 0xf3 0x0f 0x63 - invalid */
5745
5746
5747/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5748FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5749{
5750 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5751 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5752}
5753
5754
5755/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5756FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5757{
5758 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5759 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5760}
5761
5762
5763/* Opcode 0xf3 0x0f 0x64 - invalid */
5764
5765
5766/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5767FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5768{
5769 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5770 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5771}
5772
5773
5774/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5775FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5776{
5777 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5778 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5779}
5780
5781
5782/* Opcode 0xf3 0x0f 0x65 - invalid */
5783
5784
5785/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5786FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5787{
5788 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5789 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5790}
5791
5792
5793/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5794FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5795{
5796 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5797 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5798}
5799
5800
5801/* Opcode 0xf3 0x0f 0x66 - invalid */
5802
5803
5804/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5805FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5806{
5807 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5808 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5809}
5810
5811
5812/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5813FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5814{
5815 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5816 SSE2_OPT_BODY_FullFull_To_Full(packuswb, iemAImpl_packuswb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5817}
5818
5819
5820/* Opcode 0xf3 0x0f 0x67 - invalid */
5821
5822
5823/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5824 * @note Intel and AMD both use Qd for the second parameter; however, they
5825 * both list it as an mmX/mem64 operand and Intel describes it as being
5826 * loaded as a qword, so it should be Qq, shouldn't it? */
5827FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5828{
5829 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5830 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5831}
5832
5833
5834/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5835FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5836{
5837 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5838 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5839}
5840
5841
5842/* Opcode 0xf3 0x0f 0x68 - invalid */
5843
5844
5845/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5846 * @note Intel and AMD both use Qd for the second parameter; however, they
5847 * both list it as an mmX/mem64 operand and Intel describes it as being
5848 * loaded as a qword, so it should be Qq, shouldn't it? */
5849FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5850{
5851 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5852 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5853}
5854
5855
5856/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5857FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5858{
5859 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5860 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5862}
5863
5864
5865/* Opcode 0xf3 0x0f 0x69 - invalid */
5866
5867
5868/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5869 * @note Intel and AMD both use Qd for the second parameter; however, they
5870 * both list it as an mmX/mem64 operand and Intel describes it as being
5871 * loaded as a qword, so it should be Qq, shouldn't it? */
5872FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5873{
5874 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5875 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5876}
5877
5878
5879/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5880FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5881{
5882 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5883 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5884}
5885
5886
5887/* Opcode 0xf3 0x0f 0x6a - invalid */
5888
5889
5890/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5891FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5892{
5893 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5894 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5895}
5896
5897
5898/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5899FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5900{
5901 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5902 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5903}
5904
5905
5906/* Opcode 0xf3 0x0f 0x6b - invalid */
5907
5908
5909/* Opcode 0x0f 0x6c - invalid */
5910
5911
5912/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5913FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5914{
5915 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5916 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5917}
5918
5919
5920/* Opcode 0xf3 0x0f 0x6c - invalid */
5921/* Opcode 0xf2 0x0f 0x6c - invalid */
5922
5923
5924/* Opcode 0x0f 0x6d - invalid */
5925
5926
5927/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5928FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5929{
5930 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5931 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5932}
5933
5934
5935/* Opcode 0xf3 0x0f 0x6d - invalid */
5936
5937
5938FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
5939{
5940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5941 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5942 {
5943 /**
5944 * @opcode 0x6e
5945 * @opcodesub rex.w=1
5946 * @oppfx none
5947 * @opcpuid mmx
5948 * @opgroup og_mmx_datamove
5949 * @opxcpttype 5
5950 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5951 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5952 */
5953 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5954 if (IEM_IS_MODRM_REG_MODE(bRm))
5955 {
5956 /* MMX, greg64 */
5957 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
5959 IEM_MC_LOCAL(uint64_t, u64Tmp);
5960
5961 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5962 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5963 IEM_MC_FPU_TO_MMX_MODE();
5964
5965 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5966 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5967
5968 IEM_MC_ADVANCE_RIP_AND_FINISH();
5969 IEM_MC_END();
5970 }
5971 else
5972 {
5973 /* MMX, [mem64] */
5974 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5976 IEM_MC_LOCAL(uint64_t, u64Tmp);
5977
5978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
5980 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5981 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5982
5983 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5984 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5985 IEM_MC_FPU_TO_MMX_MODE();
5986
5987 IEM_MC_ADVANCE_RIP_AND_FINISH();
5988 IEM_MC_END();
5989 }
5990 }
5991 else
5992 {
5993 /**
5994 * @opdone
5995 * @opcode 0x6e
5996 * @opcodesub rex.w=0
5997 * @oppfx none
5998 * @opcpuid mmx
5999 * @opgroup og_mmx_datamove
6000 * @opxcpttype 5
6001 * @opfunction iemOp_movd_q_Pd_Ey
6002 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6003 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6004 */
6005 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6006 if (IEM_IS_MODRM_REG_MODE(bRm))
6007 {
6008 /* MMX, greg32 */
6009 IEM_MC_BEGIN(0, 0);
6010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6011 IEM_MC_LOCAL(uint32_t, u32Tmp);
6012
6013 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6014 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6015 IEM_MC_FPU_TO_MMX_MODE();
6016
6017 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6018 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6019
6020 IEM_MC_ADVANCE_RIP_AND_FINISH();
6021 IEM_MC_END();
6022 }
6023 else
6024 {
6025 /* MMX, [mem32] */
6026 IEM_MC_BEGIN(0, 0);
6027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6028 IEM_MC_LOCAL(uint32_t, u32Tmp);
6029
6030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6032 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6034
6035 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6036 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6037 IEM_MC_FPU_TO_MMX_MODE();
6038
6039 IEM_MC_ADVANCE_RIP_AND_FINISH();
6040 IEM_MC_END();
6041 }
6042 }
6043}
6044
6045FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6046{
6047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6048 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6049 {
6050 /**
6051 * @opcode 0x6e
6052 * @opcodesub rex.w=1
6053 * @oppfx 0x66
6054 * @opcpuid sse2
6055 * @opgroup og_sse2_simdint_datamove
6056 * @opxcpttype 5
6057 * @optest 64-bit / op1=1 op2=2 -> op1=2
6058 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6059 */
6060 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6061 if (IEM_IS_MODRM_REG_MODE(bRm))
6062 {
6063 /* XMM, greg64 */
6064 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6066 IEM_MC_LOCAL(uint64_t, u64Tmp);
6067
6068 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6069 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6070
6071 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6072 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6073
6074 IEM_MC_ADVANCE_RIP_AND_FINISH();
6075 IEM_MC_END();
6076 }
6077 else
6078 {
6079 /* XMM, [mem64] */
6080 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6082 IEM_MC_LOCAL(uint64_t, u64Tmp);
6083
6084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6086 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6087 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6088
6089 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6090 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6091
6092 IEM_MC_ADVANCE_RIP_AND_FINISH();
6093 IEM_MC_END();
6094 }
6095 }
6096 else
6097 {
6098 /**
6099 * @opdone
6100 * @opcode 0x6e
6101 * @opcodesub rex.w=0
6102 * @oppfx 0x66
6103 * @opcpuid sse2
6104 * @opgroup og_sse2_simdint_datamove
6105 * @opxcpttype 5
6106 * @opfunction iemOp_movd_q_Vy_Ey
6107 * @optest op1=1 op2=2 -> op1=2
6108 * @optest op1=0 op2=-42 -> op1=-42
6109 */
6110 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6111 if (IEM_IS_MODRM_REG_MODE(bRm))
6112 {
6113 /* XMM, greg32 */
6114 IEM_MC_BEGIN(0, 0);
6115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6116 IEM_MC_LOCAL(uint32_t, u32Tmp);
6117
6118 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6119 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6120
6121 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6122 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6123
6124 IEM_MC_ADVANCE_RIP_AND_FINISH();
6125 IEM_MC_END();
6126 }
6127 else
6128 {
6129 /* XMM, [mem32] */
6130 IEM_MC_BEGIN(0, 0);
6131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6132 IEM_MC_LOCAL(uint32_t, u32Tmp);
6133
6134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6136 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6137 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6138
6139 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6140 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6141
6142 IEM_MC_ADVANCE_RIP_AND_FINISH();
6143 IEM_MC_END();
6144 }
6145 }
6146}
6147
6148/* Opcode 0xf3 0x0f 0x6e - invalid */
6149
6150
6151/**
6152 * @opcode 0x6f
6153 * @oppfx none
6154 * @opcpuid mmx
6155 * @opgroup og_mmx_datamove
6156 * @opxcpttype 5
6157 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6158 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6159 */
6160FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6161{
6162 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6164 if (IEM_IS_MODRM_REG_MODE(bRm))
6165 {
6166 /*
6167 * Register, register.
6168 */
6169 IEM_MC_BEGIN(0, 0);
6170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6171 IEM_MC_LOCAL(uint64_t, u64Tmp);
6172
6173 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6174 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6175 IEM_MC_FPU_TO_MMX_MODE();
6176
6177 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6178 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6179
6180 IEM_MC_ADVANCE_RIP_AND_FINISH();
6181 IEM_MC_END();
6182 }
6183 else
6184 {
6185 /*
6186 * Register, memory.
6187 */
6188 IEM_MC_BEGIN(0, 0);
6189 IEM_MC_LOCAL(uint64_t, u64Tmp);
6190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6191
6192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6194 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6195 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6196
6197 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
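 /* Note: the switch to MMX mode is done after the fetch, presumably so a
    faulting memory access leaves the x87/MMX state untouched. */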
6198 IEM_MC_FPU_TO_MMX_MODE();
6199
6200 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6201
6202 IEM_MC_ADVANCE_RIP_AND_FINISH();
6203 IEM_MC_END();
6204 }
6205}
6206
6207/**
6208 * @opcode 0x6f
6209 * @oppfx 0x66
6210 * @opcpuid sse2
6211 * @opgroup og_sse2_simdint_datamove
6212 * @opxcpttype 1
6213 * @optest op1=1 op2=2 -> op1=2
6214 * @optest op1=0 op2=-42 -> op1=-42
6215 */
6216FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6217{
6218 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6220 if (IEM_IS_MODRM_REG_MODE(bRm))
6221 {
6222 /*
6223 * Register, register.
6224 */
6225 IEM_MC_BEGIN(0, 0);
6226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6227
6228 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6229 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6230
6231 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6232 IEM_GET_MODRM_RM(pVCpu, bRm));
6233 IEM_MC_ADVANCE_RIP_AND_FINISH();
6234 IEM_MC_END();
6235 }
6236 else
6237 {
6238 /*
6239 * Register, memory.
6240 */
6241 IEM_MC_BEGIN(0, 0);
6242 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6244
6245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6247 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6248 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6249
6250 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6251 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6252
6253 IEM_MC_ADVANCE_RIP_AND_FINISH();
6254 IEM_MC_END();
6255 }
6256}
6257
6258/**
6259 * @opcode 0x6f
6260 * @oppfx 0xf3
6261 * @opcpuid sse2
6262 * @opgroup og_sse2_simdint_datamove
6263 * @opxcpttype 4UA
6264 * @optest op1=1 op2=2 -> op1=2
6265 * @optest op1=0 op2=-42 -> op1=-42
6266 */
6267FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6268{
6269 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6271 if (IEM_IS_MODRM_REG_MODE(bRm))
6272 {
6273 /*
6274 * Register, register.
6275 */
6276 IEM_MC_BEGIN(0, 0);
6277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6278 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6279 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6280 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6281 IEM_GET_MODRM_RM(pVCpu, bRm));
6282 IEM_MC_ADVANCE_RIP_AND_FINISH();
6283 IEM_MC_END();
6284 }
6285 else
6286 {
6287 /*
6288 * Register, memory.
6289 */
6290 IEM_MC_BEGIN(0, 0);
6291 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6293
6294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6296 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6297 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
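 /* Unlike movdqa above, which uses the aligned fetch (#GP on a misaligned
    operand), movdqu uses the no-alignment-check variant. */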
6298 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6299 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6300
6301 IEM_MC_ADVANCE_RIP_AND_FINISH();
6302 IEM_MC_END();
6303 }
6304}
6305
6306
6307/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6308FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6309{
6310 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6312 if (IEM_IS_MODRM_REG_MODE(bRm))
6313 {
6314 /*
6315 * Register, register.
6316 */
6317 IEM_MC_BEGIN(0, 0);
6318 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6320 IEM_MC_ARG(uint64_t *, pDst, 0);
6321 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6322 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6323 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6324 IEM_MC_PREPARE_FPU_USAGE();
6325 IEM_MC_FPU_TO_MMX_MODE();
6326
6327 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6328 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6329 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6330 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6331
6332 IEM_MC_ADVANCE_RIP_AND_FINISH();
6333 IEM_MC_END();
6334 }
6335 else
6336 {
6337 /*
6338 * Register, memory.
6339 */
6340 IEM_MC_BEGIN(0, 0);
6341 IEM_MC_ARG(uint64_t *, pDst, 0);
6342 IEM_MC_LOCAL(uint64_t, uSrc);
6343 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6345
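 /* The third argument (1) accounts for the immediate byte that still follows
    the ModR/M encoding; this matters for RIP-relative addressing (the SSE2
    worker below does the same). */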
6346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6347 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6348 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6350 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6351 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6352
6353 IEM_MC_PREPARE_FPU_USAGE();
6354 IEM_MC_FPU_TO_MMX_MODE();
6355
6356 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6357 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6358 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6359
6360 IEM_MC_ADVANCE_RIP_AND_FINISH();
6361 IEM_MC_END();
6362 }
6363}
6364
6365
6366/**
6367 * Common worker for SSE2 instructions on the forms:
6368 * pshufd xmm1, xmm2/mem128, imm8
6369 * pshufhw xmm1, xmm2/mem128, imm8
6370 * pshuflw xmm1, xmm2/mem128, imm8
6371 *
6372 * Proper alignment of the 128-bit operand is enforced.
6373 * Exceptions type 4. SSE2 cpuid checks.
6374 */
6375FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6376{
6377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6378 if (IEM_IS_MODRM_REG_MODE(bRm))
6379 {
6380 /*
6381 * Register, register.
6382 */
6383 IEM_MC_BEGIN(0, 0);
6384 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6386 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6387 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6388 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6389 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6390 IEM_MC_PREPARE_SSE_USAGE();
6391 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6392 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6393 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6394 IEM_MC_ADVANCE_RIP_AND_FINISH();
6395 IEM_MC_END();
6396 }
6397 else
6398 {
6399 /*
6400 * Register, memory.
6401 */
6402 IEM_MC_BEGIN(0, 0);
6403 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6404 IEM_MC_LOCAL(RTUINT128U, uSrc);
6405 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6407
6408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6409 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6410 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6412 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6413
6414 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6415 IEM_MC_PREPARE_SSE_USAGE();
6416 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6417 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6418
6419 IEM_MC_ADVANCE_RIP_AND_FINISH();
6420 IEM_MC_END();
6421 }
6422}
6423
6424
6425/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6426FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6427{
6428 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6429 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6430}
6431
6432
6433/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6434FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6435{
6436 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6437 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6438}
6439
6440
6441/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6442FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6443{
6444 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6445 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6446}
6447
6448
6449/**
6450 * Common worker for MMX instructions of the form:
6451 * psrlw mm, imm8
6452 * psraw mm, imm8
6453 * psllw mm, imm8
6454 * psrld mm, imm8
6455 * psrad mm, imm8
6456 * pslld mm, imm8
6457 * psrlq mm, imm8
6458 * psllq mm, imm8
6459 *
6460 */
6461FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6462{
6463 if (IEM_IS_MODRM_REG_MODE(bRm))
6464 {
6465 /*
6466 * Register, immediate.
6467 */
6468 IEM_MC_BEGIN(0, 0);
6469 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6471 IEM_MC_ARG(uint64_t *, pDst, 0);
6472 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6473 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6474 IEM_MC_PREPARE_FPU_USAGE();
6475 IEM_MC_FPU_TO_MMX_MODE();
6476
6477 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6478 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6479 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6480
6481 IEM_MC_ADVANCE_RIP_AND_FINISH();
6482 IEM_MC_END();
6483 }
6484 else
6485 {
6486 /*
6487 * Register, memory not supported.
6488 */
6489 /// @todo Caller already enforced register mode?!
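 /* It has: iemOp_Grp12/13/14 dispatch all memory forms to
    iemOp_InvalidWithRMNeedImm8, so this path should be unreachable. */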
6490 AssertFailedReturn(VINF_SUCCESS);
6491 }
6492}
6493
6494
6495#if 0 /*unused*/
6496/**
6497 * Common worker for SSE2 instructions of the form:
6498 * psrlw xmm, imm8
6499 * psraw xmm, imm8
6500 * psllw xmm, imm8
6501 * psrld xmm, imm8
6502 * psrad xmm, imm8
6503 * pslld xmm, imm8
6504 * psrlq xmm, imm8
6505 * psllq xmm, imm8
6506 *
6507 */
6508FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6509{
6510 if (IEM_IS_MODRM_REG_MODE(bRm))
6511 {
6512 /*
6513 * Register, immediate.
6514 */
6515 IEM_MC_BEGIN(0, 0);
6516 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6518 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6519 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6520 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6521 IEM_MC_PREPARE_SSE_USAGE();
6522 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6523 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6524 IEM_MC_ADVANCE_RIP_AND_FINISH();
6525 IEM_MC_END();
6526 }
6527 else
6528 {
6529 /*
6530 * Register, memory.
6531 */
6532 /// @todo Caller already enforced register mode?!
6533 AssertFailedReturn(VINF_SUCCESS);
6534 }
6535}
6536#endif
6537
6538
6539/**
6540 * Preprocessor macro variant of iemOpCommonSse2_Shift_Imm
6541 */
6542#define SSE2_SHIFT_BODY_Imm(a_Ins, a_bRm, a_fRegNativeArchs) \
6543 if (IEM_IS_MODRM_REG_MODE((a_bRm))) \
6544 { \
6545 /* \
6546 * Register, immediate. \
6547 */ \
6548 IEM_MC_BEGIN(0, 0); \
6549 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
6551 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
6552 IEM_MC_PREPARE_SSE_USAGE(); \
6553 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
6554 IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_ri_u128), IEM_GET_MODRM_RM(pVCpu, (a_bRm)), bImm); \
6555 } IEM_MC_NATIVE_ELSE() { \
6556 IEM_MC_ARG(PRTUINT128U, pDst, 0); \
6557 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1); \
6558 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, (a_bRm))); \
6559 IEM_MC_CALL_VOID_AIMPL_2(RT_CONCAT3(iemAImpl_,a_Ins,_imm_u128), pDst, bShiftArg); \
6560 } IEM_MC_NATIVE_ENDIF(); \
6561 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6562 IEM_MC_END(); \
6563 } \
6564 else \
6565 { \
6566 /* \
6567 * Register, memory. \
6568 */ \
6569 AssertFailedReturn(VINF_SUCCESS); \
6570 } (void)0
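/* Typical use, e.g. for the 0x66 0x0f 0x71 /2 encoding below:
 *     SSE2_SHIFT_BODY_Imm(psrlw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 * which emits the native iemNativeEmit_psrlw_ri_u128 body on AMD64/ARM64 and
 * otherwise falls back to the iemAImpl_psrlw_imm_u128 helper. */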
6571
6572
6573/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6574FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6575{
6576// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6577 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6578}
6579
6580
6581/** Opcode 0x66 0x0f 0x71 11/2. */
6582FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6583{
6584// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6585 SSE2_SHIFT_BODY_Imm(psrlw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6586}
6587
6588
6589/** Opcode 0x0f 0x71 11/4. */
6590FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6591{
6592// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6593 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6594}
6595
6596
6597/** Opcode 0x66 0x0f 0x71 11/4. */
6598FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6599{
6600// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6601 SSE2_SHIFT_BODY_Imm(psraw, bRm, 0);
6602}
6603
6604
6605/** Opcode 0x0f 0x71 11/6. */
6606FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6607{
6608// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6609 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6610}
6611
6612
6613/** Opcode 0x66 0x0f 0x71 11/6. */
6614FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6615{
6616// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6617 SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6618}
6619
6620
6621/**
6622 * Group 12 jump table for register variant.
6623 */
6624IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6625{
6626 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6627 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6628 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6629 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6630 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6631 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6632 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6633 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6634};
6635AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
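/* The table is indexed by ModRM.reg * 4 + idxPrefix, the four columns per
   row following the usual IEM prefix order: none, 0x66, 0xf3, 0xf2 (groups
   13 and 14 below use the same layout). */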
6636
6637
6638/** Opcode 0x0f 0x71. */
6639FNIEMOP_DEF(iemOp_Grp12)
6640{
6641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6642 if (IEM_IS_MODRM_REG_MODE(bRm))
6643 /* register, register */
6644 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6645 + pVCpu->iem.s.idxPrefix], bRm);
6646 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6647}
6648
6649
6650/** Opcode 0x0f 0x72 11/2. */
6651FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6652{
6653// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6654 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6655}
6656
6657
6658/** Opcode 0x66 0x0f 0x72 11/2. */
6659FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6660{
6661// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6662 SSE2_SHIFT_BODY_Imm(psrld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6663}
6664
6665
6666/** Opcode 0x0f 0x72 11/4. */
6667FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6668{
6669// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6670 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6671}
6672
6673
6674/** Opcode 0x66 0x0f 0x72 11/4. */
6675FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6676{
6677// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6678 SSE2_SHIFT_BODY_Imm(psrad, bRm, 0);
6679}
6680
6681
6682/** Opcode 0x0f 0x72 11/6. */
6683FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6684{
6685// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6686 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6687}
6688
6689/** Opcode 0x66 0x0f 0x72 11/6. */
6690FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6691{
6692// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6693 SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6694}
6695
6696
6697/**
6698 * Group 13 jump table for register variant.
6699 */
6700IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6701{
6702 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6703 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6704 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6705 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6706 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6707 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6708 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6709 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6710};
6711AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6712
6713/** Opcode 0x0f 0x72. */
6714FNIEMOP_DEF(iemOp_Grp13)
6715{
6716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6717 if (IEM_IS_MODRM_REG_MODE(bRm))
6718 /* register, register */
6719 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6720 + pVCpu->iem.s.idxPrefix], bRm);
6721 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6722}
6723
6724
6725/** Opcode 0x0f 0x73 11/2. */
6726FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6727{
6728// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6729 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6730}
6731
6732
6733/** Opcode 0x66 0x0f 0x73 11/2. */
6734FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6735{
6736// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6737 SSE2_SHIFT_BODY_Imm(psrlq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6738}
6739
6740
6741/** Opcode 0x66 0x0f 0x73 11/3. */
6742FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6743{
6744// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6745 SSE2_SHIFT_BODY_Imm(psrldq, bRm, 0);
6746}
6747
6748
6749/** Opcode 0x0f 0x73 11/6. */
6750FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6751{
6752// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6753 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6754}
6755
6756
6757/** Opcode 0x66 0x0f 0x73 11/6. */
6758FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6759{
6760// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6761 SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6762}
6763
6764
6765/** Opcode 0x66 0x0f 0x73 11/7. */
6766FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6767{
6768// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6769 SSE2_SHIFT_BODY_Imm(pslldq, bRm, 0);
6770}
6771
6772/**
6773 * Group 14 jump table for register variant.
6774 */
6775IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6776{
6777 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6778 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6779 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6780 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6781 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6782 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6783 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6784 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6785};
6786AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
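/* Note: /3 (psrldq) and /7 (pslldq) only exist with the 0x66 prefix; the
   byte-wide shifts have no MMX form, hence the invalid entries in the first
   column. */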
6787
6788
6789/** Opcode 0x0f 0x73. */
6790FNIEMOP_DEF(iemOp_Grp14)
6791{
6792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6793 if (IEM_IS_MODRM_REG_MODE(bRm))
6794 /* register, register */
6795 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6796 + pVCpu->iem.s.idxPrefix], bRm);
6797 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6798}
6799
6800
6801/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6802FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6803{
6804 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6805 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6806}
6807
6808
6809/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6810FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6811{
6812 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6813 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqb, iemAImpl_pcmpeqb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6814}
6815
6816
6817/* Opcode 0xf3 0x0f 0x74 - invalid */
6818/* Opcode 0xf2 0x0f 0x74 - invalid */
6819
6820
6821/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6822FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6823{
6824 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6825 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6826}
6827
6828
6829/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6830FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6831{
6832 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6833 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqw, iemAImpl_pcmpeqw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6834}
6835
6836
6837/* Opcode 0xf3 0x0f 0x75 - invalid */
6838/* Opcode 0xf2 0x0f 0x75 - invalid */
6839
6840
6841/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6842FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6843{
6844 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6845 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6846}
6847
6848
6849/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6850FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6851{
6852 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6853 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqd, iemAImpl_pcmpeqd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6854}
6855
6856
6857/* Opcode 0xf3 0x0f 0x76 - invalid */
6858/* Opcode 0xf2 0x0f 0x76 - invalid */
6859
6860
6861/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6862FNIEMOP_DEF(iemOp_emms)
6863{
6864 IEMOP_MNEMONIC(emms, "emms");
6865 IEM_MC_BEGIN(0, 0);
6866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6867 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6868 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6869 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
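 /* The architectural effect of emms: mark all x87 stack registers empty
    again so regular FPU code can safely follow MMX code. */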
6870 IEM_MC_FPU_FROM_MMX_MODE();
6871 IEM_MC_ADVANCE_RIP_AND_FINISH();
6872 IEM_MC_END();
6873}
6874
6875/* Opcode 0x66 0x0f 0x77 - invalid */
6876/* Opcode 0xf3 0x0f 0x77 - invalid */
6877/* Opcode 0xf2 0x0f 0x77 - invalid */
6878
6879/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6880#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6881FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6882{
6883 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6884 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6885 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6886 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6887
6888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6889 if (IEM_IS_MODRM_REG_MODE(bRm))
6890 {
6891 /*
6892 * Register, register.
6893 */
6894 if (enmEffOpSize == IEMMODE_64BIT)
6895 {
6896 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6897 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6898 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6899 IEM_MC_ARG(uint64_t, u64Enc, 1);
6900 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6901 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
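 /* The second argument to IEM_MC_CALL_CIMPL_2 is the mask of guest register
    shadows for the native recompiler to flush; here the destination GPR may
    be written by the C implementation. */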
6902 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6903 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6904 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6905 IEM_MC_END();
6906 }
6907 else
6908 {
6909 IEM_MC_BEGIN(0, 0);
6910 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6911 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6912 IEM_MC_ARG(uint32_t, u32Enc, 1);
6913 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6914 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6915 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6916 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6917 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6918 IEM_MC_END();
6919 }
6920 }
6921 else
6922 {
6923 /*
6924 * Memory, register.
6925 */
6926 if (enmEffOpSize == IEMMODE_64BIT)
6927 {
6928 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6929 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6931 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6932 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6933 IEM_MC_ARG(uint64_t, u64Enc, 2);
6934 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6935 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6936 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6937 IEM_MC_END();
6938 }
6939 else
6940 {
6941 IEM_MC_BEGIN(0, 0);
6942 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6944 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6945 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6946 IEM_MC_ARG(uint32_t, u32Enc, 2);
6947 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6948 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6949 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
6950 IEM_MC_END();
6951 }
6952 }
6953}
6954#else
6955FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
6956#endif
6957
6958/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
6959FNIEMOP_STUB(iemOp_AmdGrp17);
6960/* Opcode 0xf3 0x0f 0x78 - invalid */
6961/* Opcode 0xf2 0x0f 0x78 - invalid */
6962
6963/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
6964#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6965FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
6966{
6967 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
6968 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
6969 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
6970 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6971
6972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6973 if (IEM_IS_MODRM_REG_MODE(bRm))
6974 {
6975 /*
6976 * Register, register.
6977 */
6978 if (enmEffOpSize == IEMMODE_64BIT)
6979 {
6980 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6981 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6982 IEM_MC_ARG(uint64_t, u64Val, 0);
6983 IEM_MC_ARG(uint64_t, u64Enc, 1);
6984 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6985 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6986 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
6987 IEM_MC_END();
6988 }
6989 else
6990 {
6991 IEM_MC_BEGIN(0, 0);
6992 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6993 IEM_MC_ARG(uint32_t, u32Val, 0);
6994 IEM_MC_ARG(uint32_t, u32Enc, 1);
6995 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6996 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6997 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
6998 IEM_MC_END();
6999 }
7000 }
7001 else
7002 {
7003 /*
7004 * Register, memory.
7005 */
7006 if (enmEffOpSize == IEMMODE_64BIT)
7007 {
7008 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7009 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7011 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7012 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7013 IEM_MC_ARG(uint64_t, u64Enc, 2);
7014 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7015 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7016 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7017 IEM_MC_END();
7018 }
7019 else
7020 {
7021 IEM_MC_BEGIN(0, 0);
7022 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7024 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7025 IEM_MC_ARG(uint32_t, u32Enc, 2);
7026 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7027 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7028 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7029 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7030 IEM_MC_END();
7031 }
7032 }
7033}
7034#else
7035FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
7036#endif
7037/* Opcode 0x66 0x0f 0x79 - invalid */
7038/* Opcode 0xf3 0x0f 0x79 - invalid */
7039/* Opcode 0xf2 0x0f 0x79 - invalid */
7040
7041/* Opcode 0x0f 0x7a - invalid */
7042/* Opcode 0x66 0x0f 0x7a - invalid */
7043/* Opcode 0xf3 0x0f 0x7a - invalid */
7044/* Opcode 0xf2 0x0f 0x7a - invalid */
7045
7046/* Opcode 0x0f 0x7b - invalid */
7047/* Opcode 0x66 0x0f 0x7b - invalid */
7048/* Opcode 0xf3 0x0f 0x7b - invalid */
7049/* Opcode 0xf2 0x0f 0x7b - invalid */
7050
7051/* Opcode 0x0f 0x7c - invalid */
7052
7053
7054/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7055FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7056{
7057 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7058 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7059}
7060
7061
7062/* Opcode 0xf3 0x0f 0x7c - invalid */
7063
7064
7065/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7066FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7067{
7068 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7069 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7070}
7071
7072
7073/* Opcode 0x0f 0x7d - invalid */
7074
7075
7076/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7077FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7078{
7079 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7080 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7081}
7082
7083
7084/* Opcode 0xf3 0x0f 0x7d - invalid */
7085
7086
7087/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7088FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7089{
7090 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7091 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7092}
7093
7094
7095/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7096FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7097{
7098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7099 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7100 {
7101 /**
7102 * @opcode 0x7e
7103 * @opcodesub rex.w=1
7104 * @oppfx none
7105 * @opcpuid mmx
7106 * @opgroup og_mmx_datamove
7107 * @opxcpttype 5
7108 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7109 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7110 */
7111 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7112 if (IEM_IS_MODRM_REG_MODE(bRm))
7113 {
7114 /* greg64, MMX */
7115 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7117 IEM_MC_LOCAL(uint64_t, u64Tmp);
7118
7119 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7120 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7121 IEM_MC_FPU_TO_MMX_MODE();
7122
7123 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7124 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7125
7126 IEM_MC_ADVANCE_RIP_AND_FINISH();
7127 IEM_MC_END();
7128 }
7129 else
7130 {
7131 /* [mem64], MMX */
7132 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7134 IEM_MC_LOCAL(uint64_t, u64Tmp);
7135
7136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7138 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7139 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7140
7141 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7142 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7143 IEM_MC_FPU_TO_MMX_MODE();
7144
7145 IEM_MC_ADVANCE_RIP_AND_FINISH();
7146 IEM_MC_END();
7147 }
7148 }
7149 else
7150 {
7151 /**
7152 * @opdone
7153 * @opcode 0x7e
7154 * @opcodesub rex.w=0
7155 * @oppfx none
7156 * @opcpuid mmx
7157 * @opgroup og_mmx_datamove
7158 * @opxcpttype 5
     * @opfunction  iemOp_movd_q_Ey_Pd
7160 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7161 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7162 */
7163 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7164 if (IEM_IS_MODRM_REG_MODE(bRm))
7165 {
7166 /* greg32, MMX */
7167 IEM_MC_BEGIN(0, 0);
7168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7169 IEM_MC_LOCAL(uint32_t, u32Tmp);
7170
7171 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7172 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7173 IEM_MC_FPU_TO_MMX_MODE();
7174
7175 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7176 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7177
7178 IEM_MC_ADVANCE_RIP_AND_FINISH();
7179 IEM_MC_END();
7180 }
7181 else
7182 {
7183 /* [mem32], MMX */
7184 IEM_MC_BEGIN(0, 0);
7185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7186 IEM_MC_LOCAL(uint32_t, u32Tmp);
7187
7188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7190 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7191 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7192
7193 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7194 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7195 IEM_MC_FPU_TO_MMX_MODE();
7196
7197 IEM_MC_ADVANCE_RIP_AND_FINISH();
7198 IEM_MC_END();
7199 }
7200 }
7201}
7202
7203
7204FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7205{
7206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7207 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7208 {
7209 /**
7210 * @opcode 0x7e
7211 * @opcodesub rex.w=1
7212 * @oppfx 0x66
7213 * @opcpuid sse2
7214 * @opgroup og_sse2_simdint_datamove
7215 * @opxcpttype 5
7216 * @optest 64-bit / op1=1 op2=2 -> op1=2
7217 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7218 */
7219 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7220 if (IEM_IS_MODRM_REG_MODE(bRm))
7221 {
7222 /* greg64, XMM */
7223 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7225 IEM_MC_LOCAL(uint64_t, u64Tmp);
7226
7227 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7228 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7229
7230 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7231 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7232
7233 IEM_MC_ADVANCE_RIP_AND_FINISH();
7234 IEM_MC_END();
7235 }
7236 else
7237 {
7238 /* [mem64], XMM */
7239 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7241 IEM_MC_LOCAL(uint64_t, u64Tmp);
7242
7243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7245 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7246 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7247
7248 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7249 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7250
7251 IEM_MC_ADVANCE_RIP_AND_FINISH();
7252 IEM_MC_END();
7253 }
7254 }
7255 else
7256 {
7257 /**
7258 * @opdone
7259 * @opcode 0x7e
7260 * @opcodesub rex.w=0
7261 * @oppfx 0x66
7262 * @opcpuid sse2
7263 * @opgroup og_sse2_simdint_datamove
7264 * @opxcpttype 5
     * @opfunction  iemOp_movd_q_Ey_Vy
7266 * @optest op1=1 op2=2 -> op1=2
7267 * @optest op1=0 op2=-42 -> op1=-42
7268 */
7269 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7270 if (IEM_IS_MODRM_REG_MODE(bRm))
7271 {
7272 /* greg32, XMM */
7273 IEM_MC_BEGIN(0, 0);
7274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7275 IEM_MC_LOCAL(uint32_t, u32Tmp);
7276
7277 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7278 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7279
7280 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7281 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7282
7283 IEM_MC_ADVANCE_RIP_AND_FINISH();
7284 IEM_MC_END();
7285 }
7286 else
7287 {
7288 /* [mem32], XMM */
7289 IEM_MC_BEGIN(0, 0);
7290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7291 IEM_MC_LOCAL(uint32_t, u32Tmp);
7292
7293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7295 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7296 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7297
7298 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7299 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7300
7301 IEM_MC_ADVANCE_RIP_AND_FINISH();
7302 IEM_MC_END();
7303 }
7304 }
7305}
7306
7307/**
7308 * @opcode 0x7e
7309 * @oppfx 0xf3
7310 * @opcpuid sse2
7311 * @opgroup og_sse2_pcksclr_datamove
7312 * @opxcpttype none
7313 * @optest op1=1 op2=2 -> op1=2
7314 * @optest op1=0 op2=-42 -> op1=-42
7315 */
7316FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7317{
7318 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7320 if (IEM_IS_MODRM_REG_MODE(bRm))
7321 {
7322 /*
7323 * XMM128, XMM64.
7324 */
7325 IEM_MC_BEGIN(0, 0);
7326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7327 IEM_MC_LOCAL(uint64_t, uSrc);
7328
7329 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7330 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7331
7332 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7333 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7334
7335 IEM_MC_ADVANCE_RIP_AND_FINISH();
7336 IEM_MC_END();
7337 }
7338 else
7339 {
7340 /*
7341 * XMM128, [mem64].
7342 */
7343 IEM_MC_BEGIN(0, 0);
7344 IEM_MC_LOCAL(uint64_t, uSrc);
7345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7346
7347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7350 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7351
7352 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7353 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7354
7355 IEM_MC_ADVANCE_RIP_AND_FINISH();
7356 IEM_MC_END();
7357 }
7358}
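
/* Unlike the 0x66 forms of 0x7e above, this F3 MOVQ writes a whole XMM
 * register: the low quadword is copied and bits 127:64 are zeroed, which is
 * what the IEM_MC_STORE_XREG_U64_ZX_U128 invocations express. */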
7359
7360/* Opcode 0xf2 0x0f 0x7e - invalid */
7361
7362
7363/** Opcode 0x0f 0x7f - movq Qq, Pq */
7364FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7365{
7366 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7368 if (IEM_IS_MODRM_REG_MODE(bRm))
7369 {
7370 /*
7371 * MMX, MMX.
7372 */
7373 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7374 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7375 IEM_MC_BEGIN(0, 0);
7376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7377 IEM_MC_LOCAL(uint64_t, u64Tmp);
7378 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7379 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7380 IEM_MC_FPU_TO_MMX_MODE();
7381
7382 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7383 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7384
7385 IEM_MC_ADVANCE_RIP_AND_FINISH();
7386 IEM_MC_END();
7387 }
7388 else
7389 {
7390 /*
7391 * [mem64], MMX.
7392 */
7393 IEM_MC_BEGIN(0, 0);
7394 IEM_MC_LOCAL(uint64_t, u64Tmp);
7395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7396
7397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7399 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7400 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7401
7402 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7403 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7404 IEM_MC_FPU_TO_MMX_MODE();
7405
7406 IEM_MC_ADVANCE_RIP_AND_FINISH();
7407 IEM_MC_END();
7408 }
7409}
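
/* Note the IEM_MC_FPU_TO_MMX_MODE() placement above: the register form
 * enters MMX mode up front, while the memory form does so only after the
 * store has succeeded, presumably so that a #PF on the store restarts the
 * instruction without having touched the FPU state. */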
7410
7411/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7412FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7413{
7414 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7416 if (IEM_IS_MODRM_REG_MODE(bRm))
7417 {
7418 /*
7419 * XMM, XMM.
7420 */
7421 IEM_MC_BEGIN(0, 0);
7422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7423 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7424 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7425 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7426 IEM_GET_MODRM_REG(pVCpu, bRm));
7427 IEM_MC_ADVANCE_RIP_AND_FINISH();
7428 IEM_MC_END();
7429 }
7430 else
7431 {
7432 /*
7433 * [mem128], XMM.
7434 */
7435 IEM_MC_BEGIN(0, 0);
7436 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7438
7439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7441 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7442 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7443
7444 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7445 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7446
7447 IEM_MC_ADVANCE_RIP_AND_FINISH();
7448 IEM_MC_END();
7449 }
7450}
7451
7452/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7453FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7454{
7455 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7457 if (IEM_IS_MODRM_REG_MODE(bRm))
7458 {
7459 /*
7460 * XMM, XMM.
7461 */
7462 IEM_MC_BEGIN(0, 0);
7463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7466 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7467 IEM_GET_MODRM_REG(pVCpu, bRm));
7468 IEM_MC_ADVANCE_RIP_AND_FINISH();
7469 IEM_MC_END();
7470 }
7471 else
7472 {
7473 /*
7474 * [mem128], XMM.
7475 */
7476 IEM_MC_BEGIN(0, 0);
7477 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7479
7480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7482 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7483 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7484
7485 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7486 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7487
7488 IEM_MC_ADVANCE_RIP_AND_FINISH();
7489 IEM_MC_END();
7490 }
7491}
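
/* The only difference from movdqa above is that movdqu tolerates unaligned
 * operands, hence the unaligned-tolerant IEM_MC_STORE_MEM_U128_NO_AC here
 * instead of IEM_MC_STORE_MEM_U128_ALIGN_SSE, which raises #GP on a
 * misaligned access. */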
7492
7493/* Opcode 0xf2 0x0f 0x7f - invalid */
7494
7495
7496/**
7497 * @opcode 0x80
7498 * @opfltest of
7499 */
7500FNIEMOP_DEF(iemOp_jo_Jv)
7501{
7502 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7503 IEMOP_HLP_MIN_386();
7504 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7505 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7506 {
7507 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7508 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7510 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7511 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7512 } IEM_MC_ELSE() {
7513 IEM_MC_ADVANCE_RIP_AND_FINISH();
7514 } IEM_MC_ENDIF();
7515 IEM_MC_END();
7516 }
7517 else
7518 {
7519 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7520 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7522 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7523 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7524 } IEM_MC_ELSE() {
7525 IEM_MC_ADVANCE_RIP_AND_FINISH();
7526 } IEM_MC_ENDIF();
7527 IEM_MC_END();
7528 }
7529}
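
/* Jcc Jv exists only with rel16 and rel32 displacements. In 64-bit mode the
 * operand size is forced to 64 bits (and Intel ignores the 0x66 prefix
 * here), so the 32-bit immediate path above covers 64-bit code as well, the
 * displacement being sign-extended when added to RIP. The remaining fifteen
 * Jcc decoders below differ only in the EFLAGS condition tested. */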
7530
7531
7532/**
7533 * @opcode 0x81
7534 * @opfltest of
7535 */
7536FNIEMOP_DEF(iemOp_jno_Jv)
7537{
7538 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7539 IEMOP_HLP_MIN_386();
7540 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7541 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7542 {
7543 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7544 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7546 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7547 IEM_MC_ADVANCE_RIP_AND_FINISH();
7548 } IEM_MC_ELSE() {
7549 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7550 } IEM_MC_ENDIF();
7551 IEM_MC_END();
7552 }
7553 else
7554 {
7555 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7556 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7558 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7559 IEM_MC_ADVANCE_RIP_AND_FINISH();
7560 } IEM_MC_ELSE() {
7561 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7562 } IEM_MC_ENDIF();
7563 IEM_MC_END();
7564 }
7565}
7566
7567
7568/**
7569 * @opcode 0x82
7570 * @opfltest cf
7571 */
7572FNIEMOP_DEF(iemOp_jc_Jv)
7573{
7574 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7575 IEMOP_HLP_MIN_386();
7576 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7577 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7578 {
7579 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7580 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7583 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7584 } IEM_MC_ELSE() {
7585 IEM_MC_ADVANCE_RIP_AND_FINISH();
7586 } IEM_MC_ENDIF();
7587 IEM_MC_END();
7588 }
7589 else
7590 {
7591 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7592 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7594 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7595 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7596 } IEM_MC_ELSE() {
7597 IEM_MC_ADVANCE_RIP_AND_FINISH();
7598 } IEM_MC_ENDIF();
7599 IEM_MC_END();
7600 }
7601}
7602
7603
7604/**
7605 * @opcode 0x83
7606 * @opfltest cf
7607 */
7608FNIEMOP_DEF(iemOp_jnc_Jv)
7609{
7610 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7611 IEMOP_HLP_MIN_386();
7612 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7613 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7614 {
7615 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7616 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7618 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7619 IEM_MC_ADVANCE_RIP_AND_FINISH();
7620 } IEM_MC_ELSE() {
7621 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7622 } IEM_MC_ENDIF();
7623 IEM_MC_END();
7624 }
7625 else
7626 {
7627 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7628 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7630 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7631 IEM_MC_ADVANCE_RIP_AND_FINISH();
7632 } IEM_MC_ELSE() {
7633 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7634 } IEM_MC_ENDIF();
7635 IEM_MC_END();
7636 }
7637}
7638
7639
7640/**
7641 * @opcode 0x84
7642 * @opfltest zf
7643 */
7644FNIEMOP_DEF(iemOp_je_Jv)
7645{
7646 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7647 IEMOP_HLP_MIN_386();
7648 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7649 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7650 {
7651 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7652 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7654 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7655 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7656 } IEM_MC_ELSE() {
7657 IEM_MC_ADVANCE_RIP_AND_FINISH();
7658 } IEM_MC_ENDIF();
7659 IEM_MC_END();
7660 }
7661 else
7662 {
7663 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7664 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7666 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7667 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7668 } IEM_MC_ELSE() {
7669 IEM_MC_ADVANCE_RIP_AND_FINISH();
7670 } IEM_MC_ENDIF();
7671 IEM_MC_END();
7672 }
7673}
7674
7675
7676/**
7677 * @opcode 0x85
7678 * @opfltest zf
7679 */
7680FNIEMOP_DEF(iemOp_jne_Jv)
7681{
7682 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7683 IEMOP_HLP_MIN_386();
7684 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7685 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7686 {
7687 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7688 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7690 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7691 IEM_MC_ADVANCE_RIP_AND_FINISH();
7692 } IEM_MC_ELSE() {
7693 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7694 } IEM_MC_ENDIF();
7695 IEM_MC_END();
7696 }
7697 else
7698 {
7699 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7700 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7702 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7703 IEM_MC_ADVANCE_RIP_AND_FINISH();
7704 } IEM_MC_ELSE() {
7705 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7706 } IEM_MC_ENDIF();
7707 IEM_MC_END();
7708 }
7709}
7710
7711
7712/**
7713 * @opcode 0x86
7714 * @opfltest cf,zf
7715 */
7716FNIEMOP_DEF(iemOp_jbe_Jv)
7717{
7718 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7719 IEMOP_HLP_MIN_386();
7720 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7721 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7722 {
7723 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7724 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7726 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7727 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7728 } IEM_MC_ELSE() {
7729 IEM_MC_ADVANCE_RIP_AND_FINISH();
7730 } IEM_MC_ENDIF();
7731 IEM_MC_END();
7732 }
7733 else
7734 {
7735 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7736 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7738 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7739 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7740 } IEM_MC_ELSE() {
7741 IEM_MC_ADVANCE_RIP_AND_FINISH();
7742 } IEM_MC_ENDIF();
7743 IEM_MC_END();
7744 }
7745}
7746
7747
7748/**
7749 * @opcode 0x87
7750 * @opfltest cf,zf
7751 */
7752FNIEMOP_DEF(iemOp_jnbe_Jv)
7753{
7754 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7755 IEMOP_HLP_MIN_386();
7756 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7757 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7758 {
7759 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7760 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7762 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7763 IEM_MC_ADVANCE_RIP_AND_FINISH();
7764 } IEM_MC_ELSE() {
7765 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7766 } IEM_MC_ENDIF();
7767 IEM_MC_END();
7768 }
7769 else
7770 {
7771 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7772 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7774 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7775 IEM_MC_ADVANCE_RIP_AND_FINISH();
7776 } IEM_MC_ELSE() {
7777 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7778 } IEM_MC_ENDIF();
7779 IEM_MC_END();
7780 }
7781}
7782
7783
7784/**
7785 * @opcode 0x88
7786 * @opfltest sf
7787 */
7788FNIEMOP_DEF(iemOp_js_Jv)
7789{
7790 IEMOP_MNEMONIC(js_Jv, "js Jv");
7791 IEMOP_HLP_MIN_386();
7792 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7793 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7794 {
7795 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7796 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7799 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7800 } IEM_MC_ELSE() {
7801 IEM_MC_ADVANCE_RIP_AND_FINISH();
7802 } IEM_MC_ENDIF();
7803 IEM_MC_END();
7804 }
7805 else
7806 {
7807 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7808 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7810 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7811 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7812 } IEM_MC_ELSE() {
7813 IEM_MC_ADVANCE_RIP_AND_FINISH();
7814 } IEM_MC_ENDIF();
7815 IEM_MC_END();
7816 }
7817}
7818
7819
7820/**
7821 * @opcode 0x89
7822 * @opfltest sf
7823 */
7824FNIEMOP_DEF(iemOp_jns_Jv)
7825{
7826 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7827 IEMOP_HLP_MIN_386();
7828 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7829 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7830 {
7831 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7832 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7834 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7835 IEM_MC_ADVANCE_RIP_AND_FINISH();
7836 } IEM_MC_ELSE() {
7837 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7838 } IEM_MC_ENDIF();
7839 IEM_MC_END();
7840 }
7841 else
7842 {
7843 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7844 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7846 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7847 IEM_MC_ADVANCE_RIP_AND_FINISH();
7848 } IEM_MC_ELSE() {
7849 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7850 } IEM_MC_ENDIF();
7851 IEM_MC_END();
7852 }
7853}
7854
7855
7856/**
7857 * @opcode 0x8a
7858 * @opfltest pf
7859 */
7860FNIEMOP_DEF(iemOp_jp_Jv)
7861{
7862 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7863 IEMOP_HLP_MIN_386();
7864 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7865 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7866 {
7867 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7868 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7870 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7871 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7872 } IEM_MC_ELSE() {
7873 IEM_MC_ADVANCE_RIP_AND_FINISH();
7874 } IEM_MC_ENDIF();
7875 IEM_MC_END();
7876 }
7877 else
7878 {
7879 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7880 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7882 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7883 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7884 } IEM_MC_ELSE() {
7885 IEM_MC_ADVANCE_RIP_AND_FINISH();
7886 } IEM_MC_ENDIF();
7887 IEM_MC_END();
7888 }
7889}
7890
7891
7892/**
7893 * @opcode 0x8b
7894 * @opfltest pf
7895 */
7896FNIEMOP_DEF(iemOp_jnp_Jv)
7897{
7898 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7899 IEMOP_HLP_MIN_386();
7900 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7901 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7902 {
7903 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7904 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7906 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7907 IEM_MC_ADVANCE_RIP_AND_FINISH();
7908 } IEM_MC_ELSE() {
7909 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7910 } IEM_MC_ENDIF();
7911 IEM_MC_END();
7912 }
7913 else
7914 {
7915 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7916 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7918 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7919 IEM_MC_ADVANCE_RIP_AND_FINISH();
7920 } IEM_MC_ELSE() {
7921 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7922 } IEM_MC_ENDIF();
7923 IEM_MC_END();
7924 }
7925}
7926
7927
7928/**
7929 * @opcode 0x8c
7930 * @opfltest sf,of
7931 */
7932FNIEMOP_DEF(iemOp_jl_Jv)
7933{
7934 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7935 IEMOP_HLP_MIN_386();
7936 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7937 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7938 {
7939 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7940 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7942 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7943 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7944 } IEM_MC_ELSE() {
7945 IEM_MC_ADVANCE_RIP_AND_FINISH();
7946 } IEM_MC_ENDIF();
7947 IEM_MC_END();
7948 }
7949 else
7950 {
7951 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7952 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7954 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7955 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7956 } IEM_MC_ELSE() {
7957 IEM_MC_ADVANCE_RIP_AND_FINISH();
7958 } IEM_MC_ENDIF();
7959 IEM_MC_END();
7960 }
7961}
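
/* Worked example for the SF != OF test ('signed less than'): comparing -1
 * with 1 yields -2 with SF=1, OF=0, so SF != OF and JL is taken; comparing
 * 1 with -1 yields 2 with SF=0, OF=0, so it is not. */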
7962
7963
7964/**
7965 * @opcode 0x8d
7966 * @opfltest sf,of
7967 */
7968FNIEMOP_DEF(iemOp_jnl_Jv)
7969{
7970 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
7971 IEMOP_HLP_MIN_386();
7972 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7973 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7974 {
7975 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7976 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7978 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7979 IEM_MC_ADVANCE_RIP_AND_FINISH();
7980 } IEM_MC_ELSE() {
7981 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7982 } IEM_MC_ENDIF();
7983 IEM_MC_END();
7984 }
7985 else
7986 {
7987 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7988 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7990 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7991 IEM_MC_ADVANCE_RIP_AND_FINISH();
7992 } IEM_MC_ELSE() {
7993 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7994 } IEM_MC_ENDIF();
7995 IEM_MC_END();
7996 }
7997}
7998
7999
8000/**
8001 * @opcode 0x8e
8002 * @opfltest zf,sf,of
8003 */
8004FNIEMOP_DEF(iemOp_jle_Jv)
8005{
8006 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8007 IEMOP_HLP_MIN_386();
8008 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8009 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8010 {
8011 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8012 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8014 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8015 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8016 } IEM_MC_ELSE() {
8017 IEM_MC_ADVANCE_RIP_AND_FINISH();
8018 } IEM_MC_ENDIF();
8019 IEM_MC_END();
8020 }
8021 else
8022 {
8023 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8024 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8026 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8027 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8028 } IEM_MC_ELSE() {
8029 IEM_MC_ADVANCE_RIP_AND_FINISH();
8030 } IEM_MC_ENDIF();
8031 IEM_MC_END();
8032 }
8033}
8034
8035
8036/**
8037 * @opcode 0x8f
8038 * @opfltest zf,sf,of
8039 */
8040FNIEMOP_DEF(iemOp_jnle_Jv)
8041{
8042 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8043 IEMOP_HLP_MIN_386();
8044 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8045 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8046 {
8047 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8048 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8050 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8051 IEM_MC_ADVANCE_RIP_AND_FINISH();
8052 } IEM_MC_ELSE() {
8053 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8054 } IEM_MC_ENDIF();
8055 IEM_MC_END();
8056 }
8057 else
8058 {
8059 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8060 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8062 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8063 IEM_MC_ADVANCE_RIP_AND_FINISH();
8064 } IEM_MC_ELSE() {
8065 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8066 } IEM_MC_ENDIF();
8067 IEM_MC_END();
8068 }
8069}
8070
8071
8072/**
8073 * @opcode 0x90
8074 * @opfltest of
8075 */
8076FNIEMOP_DEF(iemOp_seto_Eb)
8077{
8078 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8079 IEMOP_HLP_MIN_386();
8080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8081
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8085 if (IEM_IS_MODRM_REG_MODE(bRm))
8086 {
8087 /* register target */
8088 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8090 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8091 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8092 } IEM_MC_ELSE() {
8093 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8094 } IEM_MC_ENDIF();
8095 IEM_MC_ADVANCE_RIP_AND_FINISH();
8096 IEM_MC_END();
8097 }
8098 else
8099 {
8100 /* memory target */
8101 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8106 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8107 } IEM_MC_ELSE() {
8108 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8109 } IEM_MC_ENDIF();
8110 IEM_MC_ADVANCE_RIP_AND_FINISH();
8111 IEM_MC_END();
8112 }
8113}
8114
8115
8116/**
8117 * @opcode 0x91
8118 * @opfltest of
8119 */
8120FNIEMOP_DEF(iemOp_setno_Eb)
8121{
8122 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8123 IEMOP_HLP_MIN_386();
8124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8125
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8129 if (IEM_IS_MODRM_REG_MODE(bRm))
8130 {
8131 /* register target */
8132 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8134 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8135 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8136 } IEM_MC_ELSE() {
8137 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8138 } IEM_MC_ENDIF();
8139 IEM_MC_ADVANCE_RIP_AND_FINISH();
8140 IEM_MC_END();
8141 }
8142 else
8143 {
8144 /* memory target */
8145 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8149 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8150 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8151 } IEM_MC_ELSE() {
8152 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8153 } IEM_MC_ENDIF();
8154 IEM_MC_ADVANCE_RIP_AND_FINISH();
8155 IEM_MC_END();
8156 }
8157}
8158
8159
8160/**
8161 * @opcode 0x92
8162 * @opfltest cf
8163 */
8164FNIEMOP_DEF(iemOp_setc_Eb)
8165{
8166 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8167 IEMOP_HLP_MIN_386();
8168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8169
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8173 if (IEM_IS_MODRM_REG_MODE(bRm))
8174 {
8175 /* register target */
8176 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8178 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8179 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8180 } IEM_MC_ELSE() {
8181 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8182 } IEM_MC_ENDIF();
8183 IEM_MC_ADVANCE_RIP_AND_FINISH();
8184 IEM_MC_END();
8185 }
8186 else
8187 {
8188 /* memory target */
8189 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8193 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8194 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8195 } IEM_MC_ELSE() {
8196 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8197 } IEM_MC_ENDIF();
8198 IEM_MC_ADVANCE_RIP_AND_FINISH();
8199 IEM_MC_END();
8200 }
8201}
8202
8203
8204/**
8205 * @opcode 0x93
8206 * @opfltest cf
8207 */
8208FNIEMOP_DEF(iemOp_setnc_Eb)
8209{
8210 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8211 IEMOP_HLP_MIN_386();
8212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8213
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8217 if (IEM_IS_MODRM_REG_MODE(bRm))
8218 {
8219 /* register target */
8220 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8222 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8223 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8224 } IEM_MC_ELSE() {
8225 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8226 } IEM_MC_ENDIF();
8227 IEM_MC_ADVANCE_RIP_AND_FINISH();
8228 IEM_MC_END();
8229 }
8230 else
8231 {
8232 /* memory target */
8233 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8237 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8238 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8239 } IEM_MC_ELSE() {
8240 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8241 } IEM_MC_ENDIF();
8242 IEM_MC_ADVANCE_RIP_AND_FINISH();
8243 IEM_MC_END();
8244 }
8245}
8246
8247
8248/**
8249 * @opcode 0x94
8250 * @opfltest zf
8251 */
8252FNIEMOP_DEF(iemOp_sete_Eb)
8253{
8254 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8255 IEMOP_HLP_MIN_386();
8256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8257
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8261 if (IEM_IS_MODRM_REG_MODE(bRm))
8262 {
8263 /* register target */
8264 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8266 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8267 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8268 } IEM_MC_ELSE() {
8269 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8270 } IEM_MC_ENDIF();
8271 IEM_MC_ADVANCE_RIP_AND_FINISH();
8272 IEM_MC_END();
8273 }
8274 else
8275 {
8276 /* memory target */
8277 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8281 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8282 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8283 } IEM_MC_ELSE() {
8284 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8285 } IEM_MC_ENDIF();
8286 IEM_MC_ADVANCE_RIP_AND_FINISH();
8287 IEM_MC_END();
8288 }
8289}
8290
8291
8292/**
8293 * @opcode 0x95
8294 * @opfltest zf
8295 */
8296FNIEMOP_DEF(iemOp_setne_Eb)
8297{
8298 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8299 IEMOP_HLP_MIN_386();
8300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8301
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8305 if (IEM_IS_MODRM_REG_MODE(bRm))
8306 {
8307 /* register target */
8308 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8310 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8311 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8312 } IEM_MC_ELSE() {
8313 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8314 } IEM_MC_ENDIF();
8315 IEM_MC_ADVANCE_RIP_AND_FINISH();
8316 IEM_MC_END();
8317 }
8318 else
8319 {
8320 /* memory target */
8321 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8325 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8326 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8327 } IEM_MC_ELSE() {
8328 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8329 } IEM_MC_ENDIF();
8330 IEM_MC_ADVANCE_RIP_AND_FINISH();
8331 IEM_MC_END();
8332 }
8333}
8334
8335
8336/**
8337 * @opcode 0x96
8338 * @opfltest cf,zf
8339 */
8340FNIEMOP_DEF(iemOp_setbe_Eb)
8341{
8342 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8343 IEMOP_HLP_MIN_386();
8344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8345
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8349 if (IEM_IS_MODRM_REG_MODE(bRm))
8350 {
8351 /* register target */
8352 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8354 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8355 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8356 } IEM_MC_ELSE() {
8357 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8358 } IEM_MC_ENDIF();
8359 IEM_MC_ADVANCE_RIP_AND_FINISH();
8360 IEM_MC_END();
8361 }
8362 else
8363 {
8364 /* memory target */
8365 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8369 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8370 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8371 } IEM_MC_ELSE() {
8372 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8373 } IEM_MC_ENDIF();
8374 IEM_MC_ADVANCE_RIP_AND_FINISH();
8375 IEM_MC_END();
8376 }
8377}
8378
8379
8380/**
8381 * @opcode 0x97
8382 * @opfltest cf,zf
8383 */
8384FNIEMOP_DEF(iemOp_setnbe_Eb)
8385{
8386 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8387 IEMOP_HLP_MIN_386();
8388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8389
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8393 if (IEM_IS_MODRM_REG_MODE(bRm))
8394 {
8395 /* register target */
8396 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8398 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8399 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8400 } IEM_MC_ELSE() {
8401 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8402 } IEM_MC_ENDIF();
8403 IEM_MC_ADVANCE_RIP_AND_FINISH();
8404 IEM_MC_END();
8405 }
8406 else
8407 {
8408 /* memory target */
8409 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8413 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8414 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8415 } IEM_MC_ELSE() {
8416 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8417 } IEM_MC_ENDIF();
8418 IEM_MC_ADVANCE_RIP_AND_FINISH();
8419 IEM_MC_END();
8420 }
8421}
8422
8423
8424/**
8425 * @opcode 0x98
8426 * @opfltest sf
8427 */
8428FNIEMOP_DEF(iemOp_sets_Eb)
8429{
8430 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8431 IEMOP_HLP_MIN_386();
8432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8433
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8437 if (IEM_IS_MODRM_REG_MODE(bRm))
8438 {
8439 /* register target */
8440 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8442 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8443 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8444 } IEM_MC_ELSE() {
8445 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8446 } IEM_MC_ENDIF();
8447 IEM_MC_ADVANCE_RIP_AND_FINISH();
8448 IEM_MC_END();
8449 }
8450 else
8451 {
8452 /* memory target */
8453 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8457 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8458 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8459 } IEM_MC_ELSE() {
8460 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8461 } IEM_MC_ENDIF();
8462 IEM_MC_ADVANCE_RIP_AND_FINISH();
8463 IEM_MC_END();
8464 }
8465}
8466
8467
8468/**
8469 * @opcode 0x99
8470 * @opfltest sf
8471 */
8472FNIEMOP_DEF(iemOp_setns_Eb)
8473{
8474 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8475 IEMOP_HLP_MIN_386();
8476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8477
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8481 if (IEM_IS_MODRM_REG_MODE(bRm))
8482 {
8483 /* register target */
8484 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8486 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8487 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8488 } IEM_MC_ELSE() {
8489 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8490 } IEM_MC_ENDIF();
8491 IEM_MC_ADVANCE_RIP_AND_FINISH();
8492 IEM_MC_END();
8493 }
8494 else
8495 {
8496 /* memory target */
8497 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8501 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8502 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8503 } IEM_MC_ELSE() {
8504 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8505 } IEM_MC_ENDIF();
8506 IEM_MC_ADVANCE_RIP_AND_FINISH();
8507 IEM_MC_END();
8508 }
8509}
8510
8511
8512/**
8513 * @opcode 0x9a
8514 * @opfltest pf
8515 */
8516FNIEMOP_DEF(iemOp_setp_Eb)
8517{
8518 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8519 IEMOP_HLP_MIN_386();
8520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8521
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8525 if (IEM_IS_MODRM_REG_MODE(bRm))
8526 {
8527 /* register target */
8528 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8530 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8531 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8532 } IEM_MC_ELSE() {
8533 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8534 } IEM_MC_ENDIF();
8535 IEM_MC_ADVANCE_RIP_AND_FINISH();
8536 IEM_MC_END();
8537 }
8538 else
8539 {
8540 /* memory target */
8541 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8545 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8546 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8547 } IEM_MC_ELSE() {
8548 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8549 } IEM_MC_ENDIF();
8550 IEM_MC_ADVANCE_RIP_AND_FINISH();
8551 IEM_MC_END();
8552 }
8553}
8554
8555
8556/**
8557 * @opcode 0x9b
8558 * @opfltest pf
8559 */
8560FNIEMOP_DEF(iemOp_setnp_Eb)
8561{
8562 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8563 IEMOP_HLP_MIN_386();
8564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8565
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8569 if (IEM_IS_MODRM_REG_MODE(bRm))
8570 {
8571 /* register target */
8572 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8574 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8575 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8576 } IEM_MC_ELSE() {
8577 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8578 } IEM_MC_ENDIF();
8579 IEM_MC_ADVANCE_RIP_AND_FINISH();
8580 IEM_MC_END();
8581 }
8582 else
8583 {
8584 /* memory target */
8585 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8589 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8590 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8591 } IEM_MC_ELSE() {
8592 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8593 } IEM_MC_ENDIF();
8594 IEM_MC_ADVANCE_RIP_AND_FINISH();
8595 IEM_MC_END();
8596 }
8597}
8598
8599
8600/**
8601 * @opcode 0x9c
8602 * @opfltest sf,of
8603 */
8604FNIEMOP_DEF(iemOp_setl_Eb)
8605{
8606 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8607 IEMOP_HLP_MIN_386();
8608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8609
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8613 if (IEM_IS_MODRM_REG_MODE(bRm))
8614 {
8615 /* register target */
8616 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8618 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8619 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8620 } IEM_MC_ELSE() {
8621 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8622 } IEM_MC_ENDIF();
8623 IEM_MC_ADVANCE_RIP_AND_FINISH();
8624 IEM_MC_END();
8625 }
8626 else
8627 {
8628 /* memory target */
8629 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8633 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8634 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8635 } IEM_MC_ELSE() {
8636 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8637 } IEM_MC_ENDIF();
8638 IEM_MC_ADVANCE_RIP_AND_FINISH();
8639 IEM_MC_END();
8640 }
8641}
8642
8643
8644/**
8645 * @opcode 0x9d
8646 * @opfltest sf,of
8647 */
8648FNIEMOP_DEF(iemOp_setnl_Eb)
8649{
8650 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8651 IEMOP_HLP_MIN_386();
8652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8653
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8657 if (IEM_IS_MODRM_REG_MODE(bRm))
8658 {
8659 /* register target */
8660 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8662 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8663 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8664 } IEM_MC_ELSE() {
8665 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8666 } IEM_MC_ENDIF();
8667 IEM_MC_ADVANCE_RIP_AND_FINISH();
8668 IEM_MC_END();
8669 }
8670 else
8671 {
8672 /* memory target */
8673 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8677 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8678 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8679 } IEM_MC_ELSE() {
8680 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8681 } IEM_MC_ENDIF();
8682 IEM_MC_ADVANCE_RIP_AND_FINISH();
8683 IEM_MC_END();
8684 }
8685}
8686
8687
8688/**
8689 * @opcode 0x9e
8690 * @opfltest zf,sf,of
8691 */
8692FNIEMOP_DEF(iemOp_setle_Eb)
8693{
8694 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8695 IEMOP_HLP_MIN_386();
8696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8697
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8701 if (IEM_IS_MODRM_REG_MODE(bRm))
8702 {
8703 /* register target */
8704 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8706 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8707 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8708 } IEM_MC_ELSE() {
8709 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8710 } IEM_MC_ENDIF();
8711 IEM_MC_ADVANCE_RIP_AND_FINISH();
8712 IEM_MC_END();
8713 }
8714 else
8715 {
8716 /* memory target */
8717 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8719 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8721 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8722 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8723 } IEM_MC_ELSE() {
8724 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8725 } IEM_MC_ENDIF();
8726 IEM_MC_ADVANCE_RIP_AND_FINISH();
8727 IEM_MC_END();
8728 }
8729}
8730
8731
8732/**
8733 * @opcode 0x9f
8734 * @opfltest zf,sf,of
8735 */
8736FNIEMOP_DEF(iemOp_setnle_Eb)
8737{
8738 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8739 IEMOP_HLP_MIN_386();
8740 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8741
    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring it for now. */
8745 if (IEM_IS_MODRM_REG_MODE(bRm))
8746 {
8747 /* register target */
8748 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8750 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8751 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8752 } IEM_MC_ELSE() {
8753 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8754 } IEM_MC_ENDIF();
8755 IEM_MC_ADVANCE_RIP_AND_FINISH();
8756 IEM_MC_END();
8757 }
8758 else
8759 {
8760 /* memory target */
8761 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8765 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8766 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8767 } IEM_MC_ELSE() {
8768 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8769 } IEM_MC_ENDIF();
8770 IEM_MC_ADVANCE_RIP_AND_FINISH();
8771 IEM_MC_END();
8772 }
8773}
8774
8775
8776/** Opcode 0x0f 0xa0. */
8777FNIEMOP_DEF(iemOp_push_fs)
8778{
8779 IEMOP_MNEMONIC(push_fs, "push fs");
8780 IEMOP_HLP_MIN_386();
8781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8782 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8783}
8784
8785
8786/** Opcode 0x0f 0xa1. */
8787FNIEMOP_DEF(iemOp_pop_fs)
8788{
8789 IEMOP_MNEMONIC(pop_fs, "pop fs");
8790 IEMOP_HLP_MIN_386();
8791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8792 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8793 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8794 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8795 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8796 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8797 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8798 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8799 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8800}
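
/* The register mask handed to IEM_MC_DEFER_TO_CIMPL_2_RET above appears to
 * name the guest state the C implementation may modify: RSP plus the FS
 * selector and its hidden base/limit/attribute fields, so that the native
 * recompiler knows which shadow copies to invalidate. (An inference from
 * the bit names; the CImpl worker itself is defined elsewhere.) */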
8801
8802
8803/** Opcode 0x0f 0xa2. */
8804FNIEMOP_DEF(iemOp_cpuid)
8805{
8806 IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not on all 486 models. */
8808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8809 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8810 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8811 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8812 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8813 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8814 iemCImpl_cpuid);
8815}
8816
8817
8818/**
8819 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8820 * iemOp_bts_Ev_Gv.
8821 */
8822
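/* Sketch of the helper contract the IEM_MC_CALL_AIMPL_3 invocations below
 * assume (names per the macro parameters):
 *
 *      uint32_t a_fnNormalU16(uint32_t fEFlagsIn, uint16_t *pu16Dst, uint16_t u16Src);
 *
 * i.e. the assembly helper receives the incoming EFLAGS, performs the bit
 * operation on *pu16Dst using the pre-masked bit index in u16Src, and
 * returns the updated EFLAGS; likewise for the U32/U64 variants. */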
8823#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8824 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8825 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8826 \
8827 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8828 { \
8829 /* register destination. */ \
8830 switch (pVCpu->iem.s.enmEffOpSize) \
8831 { \
8832 case IEMMODE_16BIT: \
8833 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8835 \
8836 IEM_MC_ARG(uint16_t, u16Src, 2); \
8837 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8838 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8839 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8840 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8841 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8842 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8843 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8844 \
8845 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8846 IEM_MC_END(); \
8847 break; \
8848 \
8849 case IEMMODE_32BIT: \
8850 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8852 \
8853 IEM_MC_ARG(uint32_t, u32Src, 2); \
8854 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8855 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8856 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8857 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8858 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8859 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8860 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8861 \
8862 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8863 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8864 IEM_MC_END(); \
8865 break; \
8866 \
8867 case IEMMODE_64BIT: \
8868 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8870 \
8871 IEM_MC_ARG(uint64_t, u64Src, 2); \
8872 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8873 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8874 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8875 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8876 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8877 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8878 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8879 \
8880 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8881 IEM_MC_END(); \
8882 break; \
8883 \
8884 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8885 } \
8886 } \
8887 else \
8888 { \
8889 /* memory destination. */ \
8890 /** @todo test negative bit offsets! */ \
8891 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8892 { \
8893 switch (pVCpu->iem.s.enmEffOpSize) \
8894 { \
8895 case IEMMODE_16BIT: \
8896 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8897 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8899 IEMOP_HLP_DONE_DECODING(); \
8900 \
8901 IEM_MC_ARG(uint16_t, u16Src, 2); \
8902 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8903 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8904 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8905 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8906 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8907 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8908 \
8909 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8910 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8911 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8912 \
8913 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8914 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8915 \
8916 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8917 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8918 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8919 IEM_MC_END(); \
8920 break; \
8921 \
8922 case IEMMODE_32BIT: \
8923 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8926 IEMOP_HLP_DONE_DECODING(); \
8927 \
8928 IEM_MC_ARG(uint32_t, u32Src, 2); \
8929 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8930 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8931 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8932 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8933 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8934 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
8935 \
8936 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8937 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8938 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8939 \
8940 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8941 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8942 \
8943 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8944 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8945 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8946 IEM_MC_END(); \
8947 break; \
8948 \
8949 case IEMMODE_64BIT: \
8950 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8953 IEMOP_HLP_DONE_DECODING(); \
8954 \
8955 IEM_MC_ARG(uint64_t, u64Src, 2); \
8956 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8957 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
8958 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
8959 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
8960 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
8961 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
8962 \
8963 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8964 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8965 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8966 \
8967 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8968 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8969 \
8970 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8971 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8972 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8973 IEM_MC_END(); \
8974 break; \
8975 \
8976 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8977 } \
8978 } \
8979 else \
8980 { \
8981 (void)0
8982/* Separate macro to work around parsing issue in IEMAllInstPython.py */
8983#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
8984 switch (pVCpu->iem.s.enmEffOpSize) \
8985 { \
8986 case IEMMODE_16BIT: \
8987 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8990 IEMOP_HLP_DONE_DECODING(); \
8991 \
8992 IEM_MC_ARG(uint16_t, u16Src, 2); \
8993 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8994 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8995 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8996 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8997 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8998 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8999 \
9000 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9001 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
9002 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9003 \
9004 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9005 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
9006 \
9007 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9008 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9009 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9010 IEM_MC_END(); \
9011 break; \
9012 \
9013 case IEMMODE_32BIT: \
9014 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9017 IEMOP_HLP_DONE_DECODING(); \
9018 \
9019 IEM_MC_ARG(uint32_t, u32Src, 2); \
9020 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9021 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9022 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9023 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9024 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9025 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9026 \
9027 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9028 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9029 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9030 \
9031 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9032 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
9033 \
9034 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9035 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9036 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9037 IEM_MC_END(); \
9038 break; \
9039 \
9040 case IEMMODE_64BIT: \
9041 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9044 IEMOP_HLP_DONE_DECODING(); \
9045 \
9046 IEM_MC_ARG(uint64_t, u64Src, 2); \
9047 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9048 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9049 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9050 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9051 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9052 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9053 \
9054 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9055 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9056 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9057 \
9058 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9059 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
9060 \
9061 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9062 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9063 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9064 IEM_MC_END(); \
9065 break; \
9066 \
9067 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9068 } \
9069 } \
9070 } \
9071 (void)0
9072
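/*
 * Note: in the memory forms above the bit offset in Gv is a *signed* index
 * relative to GCPtrEffDst, so it can address bytes below as well as above the
 * operand.  The SAR/SHL pair computes the operand-aligned byte adjustment
 * while the AND keeps the bit number within one operand.  For a 32-bit
 * 'bts [mem], eax' with eax=100: adjustment = (100 >> 5) << 2 = +12 bytes,
 * bit tested = 100 & 31 = 4.  Standalone sketch of the same arithmetic
 * (illustrative only):
 * @code
 *  static void calcBitAddr32(int32_t iBitOfs, intptr_t *poffByte, unsigned *piBit)
 *  {
 *      *poffByte = (intptr_t)(iBitOfs >> 5) * sizeof(uint32_t); // arithmetic shift keeps the sign
 *      *piBit    = (unsigned)iBitOfs & 31;                      // bit within the addressed dword
 *  }
 * @endcode
 */
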
9073/* Read-only version (bt). */
9074#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9076 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9077 \
9078 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9079 { \
9080 /* register destination. */ \
9081 switch (pVCpu->iem.s.enmEffOpSize) \
9082 { \
9083 case IEMMODE_16BIT: \
9084 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9086 \
9087 IEM_MC_ARG(uint16_t, u16Src, 2); \
9088 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9089 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9090 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9091 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9092 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9093 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9094 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9095 \
9096 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9097 IEM_MC_END(); \
9098 break; \
9099 \
9100 case IEMMODE_32BIT: \
9101 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9103 \
9104 IEM_MC_ARG(uint32_t, u32Src, 2); \
9105 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9106 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9107 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9108 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9109 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9110 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9111 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9112 \
9113 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9114 IEM_MC_END(); \
9115 break; \
9116 \
9117 case IEMMODE_64BIT: \
9118 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9120 \
9121 IEM_MC_ARG(uint64_t, u64Src, 2); \
9122 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9123 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9124 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9125 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9126 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9127 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9128 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9129 \
9130 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9131 IEM_MC_END(); \
9132 break; \
9133 \
9134 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9135 } \
9136 } \
9137 else \
9138 { \
9139 /* memory destination. */ \
9140 /** @todo test negative bit offsets! */ \
9141 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9142 { \
9143 switch (pVCpu->iem.s.enmEffOpSize) \
9144 { \
9145 case IEMMODE_16BIT: \
9146 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9149 IEMOP_HLP_DONE_DECODING(); \
9150 \
9151 IEM_MC_ARG(uint16_t, u16Src, 2); \
9152 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9153 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9154 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9155 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9156 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9157 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9158 \
9159 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9160 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9161 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9162 \
9163 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9164 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9165 \
9166 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9167 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9168 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9169 IEM_MC_END(); \
9170 break; \
9171 \
9172 case IEMMODE_32BIT: \
9173 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9176 IEMOP_HLP_DONE_DECODING(); \
9177 \
9178 IEM_MC_ARG(uint32_t, u32Src, 2); \
9179 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9180 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9181 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9182 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9183 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9184 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9185 \
9186 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9187 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9188 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9189 \
9190 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9191 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9192 \
9193 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9194 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9195 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9196 IEM_MC_END(); \
9197 break; \
9198 \
9199 case IEMMODE_64BIT: \
9200 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9203 IEMOP_HLP_DONE_DECODING(); \
9204 \
9205 IEM_MC_ARG(uint64_t, u64Src, 2); \
9206 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9207 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9208 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9209 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9210 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9211 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9212 \
9213 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9214 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9215 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9216 \
9217 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9218 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9219 \
9220 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9221 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9222 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9223 IEM_MC_END(); \
9224 break; \
9225 \
9226 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9227 } \
9228 } \
9229 else \
9230 { \
9231 IEMOP_HLP_DONE_DECODING(); \
9232 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9233 } \
9234 } \
9235 (void)0
9236
9237
9238/**
9239 * @opcode 0xa3
9240 * @oppfx n/a
9241 * @opflclass bitmap
9242 */
9243FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9244{
9245 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9246 IEMOP_HLP_MIN_386();
9247 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9248}
9249
9250
9251/**
9252 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9253 */
9254 #define IEMOP_BODY_SHLD_SHRD_Ib(a_pImplExpr) \
9255 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9256 \
9257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9258 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9259 \
9260 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9261 { \
9262 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9263 \
9264 switch (pVCpu->iem.s.enmEffOpSize) \
9265 { \
9266 case IEMMODE_16BIT: \
9267 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9269 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9270 IEM_MC_ARG(uint16_t, u16Src, 1); \
9271 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9272 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9273 \
9274 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9275 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9276 IEM_MC_REF_EFLAGS(pEFlags); \
9277 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9278 \
9279 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9280 IEM_MC_END(); \
9281 break; \
9282 \
9283 case IEMMODE_32BIT: \
9284 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9286 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9287 IEM_MC_ARG(uint32_t, u32Src, 1); \
9288 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9289 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9290 \
9291 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9292 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9293 IEM_MC_REF_EFLAGS(pEFlags); \
9294 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9295 \
9296 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9297 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9298 IEM_MC_END(); \
9299 break; \
9300 \
9301 case IEMMODE_64BIT: \
9302 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9304 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9305 IEM_MC_ARG(uint64_t, u64Src, 1); \
9306 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9307 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9308 \
9309 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9310 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9311 IEM_MC_REF_EFLAGS(pEFlags); \
9312 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9313 \
9314 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9315 IEM_MC_END(); \
9316 break; \
9317 \
9318 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9319 } \
9320 } \
9321 else \
9322 { \
9323 switch (pVCpu->iem.s.enmEffOpSize) \
9324 { \
9325 case IEMMODE_16BIT: \
9326 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9329 \
9330 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9332 \
9333 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9334 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9335 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9336 \
9337 IEM_MC_ARG(uint16_t, u16Src, 1); \
9338 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9339 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9340 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9341 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9342 \
9343 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9344 IEM_MC_COMMIT_EFLAGS(EFlags); \
9345 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9346 IEM_MC_END(); \
9347 break; \
9348 \
9349 case IEMMODE_32BIT: \
9350 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9353 \
9354 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9356 \
9357 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9358 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9359 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9360 \
9361 IEM_MC_ARG(uint32_t, u32Src, 1); \
9362 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9363 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9364 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9365 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9366 \
9367 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9368 IEM_MC_COMMIT_EFLAGS(EFlags); \
9369 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9370 IEM_MC_END(); \
9371 break; \
9372 \
9373 case IEMMODE_64BIT: \
9374 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9377 \
9378 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9380 \
9381 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9382 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9383 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9384 \
9385 IEM_MC_ARG(uint64_t, u64Src, 1); \
9386 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9387 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9388 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9389 \
9390 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9391 \
9392 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9393 IEM_MC_COMMIT_EFLAGS(EFlags); \
9394 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9395 IEM_MC_END(); \
9396 break; \
9397 \
9398 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9399 } \
9400 } (void)0
9401
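/*
 * Note: SHLD shifts the destination left and fills the vacated low bits from
 * the top of the source; SHRD mirrors this to the right.  Reference
 * behaviour for the 32-bit case, with the count already masked to 0..31
 * (illustrative only, not the actual iemAImpl_* worker):
 * @code
 *  static uint32_t shldU32(uint32_t uDst, uint32_t uSrc, unsigned cShift)
 *  {
 *      return cShift ? (uDst << cShift) | (uSrc >> (32 - cShift)) : uDst;
 *  }
 * @endcode
 */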
9402
9403/**
9404 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9405 */
9406#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9407 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9408 \
9409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9410 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9411 \
9412 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9413 { \
9414 switch (pVCpu->iem.s.enmEffOpSize) \
9415 { \
9416 case IEMMODE_16BIT: \
9417 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9419 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9420 IEM_MC_ARG(uint16_t, u16Src, 1); \
9421 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9422 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9423 \
9424 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9425 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9426 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9427 IEM_MC_REF_EFLAGS(pEFlags); \
9428 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9429 \
9430 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9431 IEM_MC_END(); \
9432 break; \
9433 \
9434 case IEMMODE_32BIT: \
9435 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9437 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9438 IEM_MC_ARG(uint32_t, u32Src, 1); \
9439 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9440 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9441 \
9442 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9443 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9444 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9445 IEM_MC_REF_EFLAGS(pEFlags); \
9446 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9447 \
9448 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9449 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9450 IEM_MC_END(); \
9451 break; \
9452 \
9453 case IEMMODE_64BIT: \
9454 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9456 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9457 IEM_MC_ARG(uint64_t, u64Src, 1); \
9458 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9459 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9460 \
9461 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9462 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9463 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9464 IEM_MC_REF_EFLAGS(pEFlags); \
9465 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9466 \
9467 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9468 IEM_MC_END(); \
9469 break; \
9470 \
9471 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9472 } \
9473 } \
9474 else \
9475 { \
9476 switch (pVCpu->iem.s.enmEffOpSize) \
9477 { \
9478 case IEMMODE_16BIT: \
9479 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9480 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9481 IEM_MC_ARG(uint16_t, u16Src, 1); \
9482 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9484 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9485 \
9486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9488 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9489 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9490 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9491 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9492 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9493 \
9494 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9495 IEM_MC_COMMIT_EFLAGS(EFlags); \
9496 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9497 IEM_MC_END(); \
9498 break; \
9499 \
9500 case IEMMODE_32BIT: \
9501 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9502 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9503 IEM_MC_ARG(uint32_t, u32Src, 1); \
9504 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9506 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9507 \
9508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9510 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9511 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9512 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9513 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9514 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9515 \
9516 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9517 IEM_MC_COMMIT_EFLAGS(EFlags); \
9518 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9519 IEM_MC_END(); \
9520 break; \
9521 \
9522 case IEMMODE_64BIT: \
9523 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9524 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9525 IEM_MC_ARG(uint64_t, u64Src, 1); \
9526 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9528 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9529 \
9530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9532 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9533 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9534 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9535 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9536 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9537 \
9538 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9539 IEM_MC_COMMIT_EFLAGS(EFlags); \
9540 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9541 IEM_MC_END(); \
9542 break; \
9543 \
9544 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9545 } \
9546 } (void)0
9547
9548
9549/**
9550 * @opcode 0xa4
9551 * @opflclass shift_count
9552 */
9553FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9554{
9555 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9556 IEMOP_HLP_MIN_386();
9557     IEMOP_BODY_SHLD_SHRD_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9558}
9559
9560
9561/**
9562 * @opcode 0xa5
9563 * @opflclass shift_count
9564 */
9565FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9566{
9567 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9568 IEMOP_HLP_MIN_386();
9569 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9570}
9571
9572
9573/** Opcode 0x0f 0xa8. */
9574FNIEMOP_DEF(iemOp_push_gs)
9575{
9576 IEMOP_MNEMONIC(push_gs, "push gs");
9577 IEMOP_HLP_MIN_386();
9578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9579 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9580}
9581
9582
9583/** Opcode 0x0f 0xa9. */
9584FNIEMOP_DEF(iemOp_pop_gs)
9585{
9586 IEMOP_MNEMONIC(pop_gs, "pop gs");
9587 IEMOP_HLP_MIN_386();
9588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9589 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9590 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9591 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9592 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9593 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9594 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9595 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9596 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9597}
9598
9599
9600/** Opcode 0x0f 0xaa. */
9601FNIEMOP_DEF(iemOp_rsm)
9602{
9603 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9604 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9606 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9607 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9608 iemCImpl_rsm);
9609}
9610
9611
9612
9613/**
9614 * @opcode 0xab
9615 * @oppfx n/a
9616 * @opflclass bitmap
9617 */
9618FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9619{
9620 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9621 IEMOP_HLP_MIN_386();
9622 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9623 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9624}
9625
9626
9627/**
9628 * @opcode 0xac
9629 * @opflclass shift_count
9630 */
9631FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9632{
9633 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9634 IEMOP_HLP_MIN_386();
9635     IEMOP_BODY_SHLD_SHRD_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9636}
9637
9638
9639/**
9640 * @opcode 0xad
9641 * @opflclass shift_count
9642 */
9643FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9644{
9645 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9646 IEMOP_HLP_MIN_386();
9647 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9648}
9649
9650
9651/** Opcode 0x0f 0xae mem/0. */
9652FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9653{
9654 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9655 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9656 IEMOP_RAISE_INVALID_OPCODE_RET();
9657
9658 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9659 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9662 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9663 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9664 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9665 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9666 IEM_MC_END();
9667}
9668
9669
9670/** Opcode 0x0f 0xae mem/1. */
9671FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9672{
9673 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9674 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9675 IEMOP_RAISE_INVALID_OPCODE_RET();
9676
9677 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9678 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9681 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9682 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9683 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9684 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9685 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9686 IEM_MC_END();
9687}
9688
9689
9690/**
9691 * @opmaps grp15
9692 * @opcode !11/2
9693 * @oppfx none
9694 * @opcpuid sse
9695 * @opgroup og_sse_mxcsrsm
9696 * @opxcpttype 5
9697 * @optest op1=0 -> mxcsr=0
9698 * @optest op1=0x2083 -> mxcsr=0x2083
9699 * @optest op1=0xfffffffe -> value.xcpt=0xd
9700 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9701 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9702 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9703 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9704 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9705 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9706 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9707 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9708 */
9709FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9710{
9711 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9712 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9713 IEMOP_RAISE_INVALID_OPCODE_RET();
9714
9715 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9716 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9719     IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr writes MXCSR, so prepare the SSE state for modification. */
9720 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9721 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9722 IEM_MC_END();
9723}
9724
9725
9726/**
9727 * @opmaps grp15
9728 * @opcode !11/3
9729 * @oppfx none
9730 * @opcpuid sse
9731 * @opgroup og_sse_mxcsrsm
9732 * @opxcpttype 5
9733 * @optest mxcsr=0 -> op1=0
9734 * @optest mxcsr=0x2083 -> op1=0x2083
9735 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9736 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9737 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9738 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9739 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9740 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9741 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9742 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9743 */
9744FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9745{
9746 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9747 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9748 IEMOP_RAISE_INVALID_OPCODE_RET();
9749
9750 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9751 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9754 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9755 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9756 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9757 IEM_MC_END();
9758}
9759
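/*
 * Note: LDMXCSR and STMXCSR are normally used as a pair: read the current
 * control/status word, tweak it, write it back.  Rough user-level
 * equivalent via the SSE intrinsics (illustrative only):
 * @code
 *  #include <xmmintrin.h>
 *  unsigned int uMxCsr = _mm_getcsr();     // stmxcsr [mem32]
 *  uMxCsr |= 0x8040;                       // set FZ (bit 15) and DAZ (bit 6)
 *  _mm_setcsr(uMxCsr);                     // ldmxcsr [mem32]
 * @endcode
 */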
9760
9761/**
9762 * @opmaps grp15
9763 * @opcode !11/4
9764 * @oppfx none
9765 * @opcpuid xsave
9766 * @opgroup og_system
9767 * @opxcpttype none
9768 */
9769FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9770{
9771 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9772 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9773 IEMOP_RAISE_INVALID_OPCODE_RET();
9774
9775 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9776 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9779 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9780 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9781 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9782 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9783 IEM_MC_END();
9784}
9785
9786
9787/**
9788 * @opmaps grp15
9789 * @opcode !11/5
9790 * @oppfx none
9791 * @opcpuid xsave
9792 * @opgroup og_system
9793 * @opxcpttype none
9794 */
9795FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9796{
9797 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9798 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9799 IEMOP_RAISE_INVALID_OPCODE_RET();
9800
9801 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9802 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9805     IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor loads new state, so prepare it for modification (like fxrstor above). */
9806 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9807 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9808 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9809 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9810 IEM_MC_END();
9811}
9812
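/*
 * Note: unlike FXSAVE/FXRSTOR, the XSAVE image is variable-sized and gated
 * by the requested-feature bitmap in EDX:EAX ANDed with XCR0, which is why
 * both CIMPLs above get the effective operand size and do all the work
 * themselves.  Typical guest-side usage through the compiler intrinsic
 * (illustrative only; pvXSaveArea is a hypothetical 64-byte aligned buffer):
 * @code
 *  #include <immintrin.h>
 *  _xsave(pvXSaveArea, UINT64_C(7));   // x87+SSE+AVX, i.e. edx:eax = 0:7
 * @endcode
 */
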
9813/** Opcode 0x0f 0xae mem/6. */
9814FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9815
9816/**
9817 * @opmaps grp15
9818 * @opcode !11/7
9819 * @oppfx none
9820 * @opcpuid clfsh
9821 * @opgroup og_cachectl
9822 * @optest op1=1 ->
9823 */
9824FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9825{
9826 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9827 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9828 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9829
9830 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9831 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9834 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9835 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9836 IEM_MC_END();
9837}
9838
9839/**
9840 * @opmaps grp15
9841 * @opcode !11/7
9842 * @oppfx 0x66
9843 * @opcpuid clflushopt
9844 * @opgroup og_cachectl
9845 * @optest op1=1 ->
9846 */
9847FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9848{
9849 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9850 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9851 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9852
9853 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9854 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9857 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9858 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9859 IEM_MC_END();
9860}
9861
9862
9863/** Opcode 0x0f 0xae 11b/5. */
9864FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9865{
9866 RT_NOREF_PV(bRm);
9867 IEMOP_MNEMONIC(lfence, "lfence");
9868 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9870#ifdef RT_ARCH_ARM64
9871 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9872#else
9873 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9874 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9875 else
9876 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9877#endif
9878 IEM_MC_ADVANCE_RIP_AND_FINISH();
9879 IEM_MC_END();
9880}
9881
9882
9883/** Opcode 0x0f 0xae 11b/6. */
9884FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9885{
9886 RT_NOREF_PV(bRm);
9887 IEMOP_MNEMONIC(mfence, "mfence");
9888 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9890#ifdef RT_ARCH_ARM64
9891 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9892#else
9893 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9894 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9895 else
9896 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9897#endif
9898 IEM_MC_ADVANCE_RIP_AND_FINISH();
9899 IEM_MC_END();
9900}
9901
9902
9903/** Opcode 0x0f 0xae 11b/7. */
9904FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9905{
9906 RT_NOREF_PV(bRm);
9907 IEMOP_MNEMONIC(sfence, "sfence");
9908 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9910#ifdef RT_ARCH_ARM64
9911 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9912#else
9913 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9914 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9915 else
9916 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9917#endif
9918 IEM_MC_ADVANCE_RIP_AND_FINISH();
9919 IEM_MC_END();
9920}
9921
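/*
 * Note: on hosts without SSE2 the iemAImpl_alt_mem_fence fallback above has
 * to approximate the fence semantics.  The classic substitute is a dummy
 * LOCKed read-modify-write, which is fully serializing on x86.  A sketch of
 * such a fallback (an assumption for illustration, not necessarily how
 * iemAImpl_alt_mem_fence is actually coded):
 * @code
 *  static void altMemFence(void)
 *  {
 *      int iDummy = 0;
 *      __atomic_fetch_add(&iDummy, 0, __ATOMIC_SEQ_CST); // emits a LOCKed instruction
 *  }
 * @endcode
 */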
9922
9923/** Opcode 0xf3 0x0f 0xae 11b/0. */
9924FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9925{
9926 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9927 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9928 {
9929 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9931 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9932 IEM_MC_LOCAL(uint64_t, u64Dst);
9933 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9934 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9935 IEM_MC_ADVANCE_RIP_AND_FINISH();
9936 IEM_MC_END();
9937 }
9938 else
9939 {
9940 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9942 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9943 IEM_MC_LOCAL(uint32_t, u32Dst);
9944 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9945 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9946 IEM_MC_ADVANCE_RIP_AND_FINISH();
9947 IEM_MC_END();
9948 }
9949}
9950
9951
9952/** Opcode 0xf3 0x0f 0xae 11b/1. */
9953FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9954{
9955 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9956 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9957 {
9958 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9960 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9961 IEM_MC_LOCAL(uint64_t, u64Dst);
9962 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9963 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9964 IEM_MC_ADVANCE_RIP_AND_FINISH();
9965 IEM_MC_END();
9966 }
9967 else
9968 {
9969 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9971 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9972 IEM_MC_LOCAL(uint32_t, u32Dst);
9973 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9974 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9975 IEM_MC_ADVANCE_RIP_AND_FINISH();
9976 IEM_MC_END();
9977 }
9978}
9979
9980
9981/** Opcode 0xf3 0x0f 0xae 11b/2. */
9982FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9983{
9984 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9985 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9986 {
9987 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9989 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9990 IEM_MC_LOCAL(uint64_t, u64Dst);
9991 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9992 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9993 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9994 IEM_MC_ADVANCE_RIP_AND_FINISH();
9995 IEM_MC_END();
9996 }
9997 else
9998 {
9999 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10001 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10002 IEM_MC_LOCAL(uint32_t, u32Dst);
10003 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10004 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10005 IEM_MC_ADVANCE_RIP_AND_FINISH();
10006 IEM_MC_END();
10007 }
10008}
10009
10010
10011/** Opcode 0xf3 0x0f 0xae 11b/3. */
10012FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10013{
10014 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10015 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10016 {
10017 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10019 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10020 IEM_MC_LOCAL(uint64_t, u64Dst);
10021 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10022 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10023 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10024 IEM_MC_ADVANCE_RIP_AND_FINISH();
10025 IEM_MC_END();
10026 }
10027 else
10028 {
10029 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10031 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10032 IEM_MC_LOCAL(uint32_t, u32Dst);
10033 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10034 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10035 IEM_MC_ADVANCE_RIP_AND_FINISH();
10036 IEM_MC_END();
10037 }
10038}
10039
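/*
 * Note: RDFSBASE/WRFSBASE and RDGSBASE/WRGSBASE give CPL-3 code direct
 * access to the FS/GS base without going through the MSRs; availability is
 * what IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT checks.  User code usually reaches
 * them through the compiler intrinsics (illustrative only, requires
 * -mfsgsbase or equivalent):
 * @code
 *  #include <immintrin.h>
 *  uint64_t uGsBase = _readgsbase_u64();   // rdgsbase rax
 *  _writegsbase_u64(uGsBase + 0x1000);     // wrgsbase rax
 * @endcode
 */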
10040
10041/**
10042 * Group 15 jump table for register variant.
10043 */
10044IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10045{ /* pfx: none, 066h, 0f3h, 0f2h */
10046 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10047 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10048 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10049 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10050 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10051 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10052 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10053 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10054};
10055AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10056
10057
10058/**
10059 * Group 15 jump table for memory variant.
10060 */
10061IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10062{ /* pfx: none, 066h, 0f3h, 0f2h */
10063 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10064 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10065 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10066 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10067 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10068 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10069 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10070 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10071};
10072AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10073
10074
10075/** Opcode 0x0f 0xae. */
10076FNIEMOP_DEF(iemOp_Grp15)
10077{
10078 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10080 if (IEM_IS_MODRM_REG_MODE(bRm))
10081 /* register, register */
10082 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10083 + pVCpu->iem.s.idxPrefix], bRm);
10084 /* memory, register */
10085 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10086 + pVCpu->iem.s.idxPrefix], bRm);
10087}
10088
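/*
 * Note: both tables above are indexed by modrm.reg * 4 plus the mandatory
 * prefix index kept in idxPrefix (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2).
 * Worked example (illustrative only):
 * @code
 *  // 'f3 0f ae /0', register form: 0*4 + 2 -> iemOp_Grp15_rdfsbase
 *  // '   0f ae /5', register form: 5*4 + 0 -> iemOp_Grp15_lfence
 *  PFNIEMOPRM const pfnRdFsBase = g_apfnGroup15RegReg[0 * 4 + 2];
 * @endcode
 */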
10089
10090/**
10091 * @opcode 0xaf
10092 * @opflclass multiply
10093 */
10094FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10095{
10096 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10097 IEMOP_HLP_MIN_386();
10098 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10099 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10101 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10102}
10103
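/*
 * Note: two-operand IMUL truncates the product to the operand size and
 * signals the truncation via CF=OF=1 (SF/ZF/AF/PF are left undefined, hence
 * the IEMOP_VERIFICATION_UNDEFINED_EFLAGS above).  Reference behaviour for
 * the 32-bit case (illustrative only):
 * @code
 *  static uint32_t imulU32(uint32_t uDst, uint32_t uSrc, int *pfCfOf)
 *  {
 *      int64_t const iFull = (int64_t)(int32_t)uDst * (int32_t)uSrc;
 *      *pfCfOf = iFull != (int32_t)iFull;  // set when the truncated result differs
 *      return (uint32_t)iFull;
 *  }
 * @endcode
 */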
10104
10105/**
10106 * @opcode 0xb0
10107 * @opflclass arithmetic
10108 */
10109FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10110{
10111 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10112 IEMOP_HLP_MIN_486();
10113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10114
10115 if (IEM_IS_MODRM_REG_MODE(bRm))
10116 {
10117 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10119 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10120 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10121 IEM_MC_ARG(uint8_t, u8Src, 2);
10122 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10123
10124 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10125 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10126 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10127 IEM_MC_REF_EFLAGS(pEFlags);
10128 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10129
10130 IEM_MC_ADVANCE_RIP_AND_FINISH();
10131 IEM_MC_END();
10132 }
10133 else
10134 {
10135#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10136 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10139 IEMOP_HLP_DONE_DECODING(); \
10140 \
10141 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10142 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10143 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10144 \
10145 IEM_MC_ARG(uint8_t, u8Src, 2); \
10146 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10147 \
10148 IEM_MC_LOCAL(uint8_t, u8Al); \
10149 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10150 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10151 \
10152 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10153 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10154 \
10155 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10156 IEM_MC_COMMIT_EFLAGS(EFlags); \
10157 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10158 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10159 IEM_MC_END()
10160
10161 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10162 {
10163 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10164 }
10165 else
10166 {
10167 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10168 }
10169 }
10170}
10171
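/*
 * Note: the workers invoked above implement the architectural CMPXCHG
 * semantics.  Reference model in plain C (illustrative only; the real
 * iemAImpl_cmpxchg_u8 additionally recomputes the arithmetic flags from the
 * implied CMP):
 * @code
 *  static void cmpxchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
 *  {
 *      if (*pu8Al == *pu8Dst)
 *      {
 *          *pfEFlags |= 1U << 6;       // ZF=1: accumulator matched,
 *          *pu8Dst    = u8Src;         // the destination takes the source.
 *      }
 *      else
 *      {
 *          *pfEFlags &= ~(1U << 6);    // ZF=0: no match,
 *          *pu8Al     = *pu8Dst;       // the accumulator takes the destination.
 *      }
 *  }
 * @endcode
 */
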
10172/**
10173 * @opcode 0xb1
10174 * @opflclass arithmetic
10175 */
10176FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10177{
10178 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10179 IEMOP_HLP_MIN_486();
10180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10181
10182 if (IEM_IS_MODRM_REG_MODE(bRm))
10183 {
10184 switch (pVCpu->iem.s.enmEffOpSize)
10185 {
10186 case IEMMODE_16BIT:
10187 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10189 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10190 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10191 IEM_MC_ARG(uint16_t, u16Src, 2);
10192 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10193
10194 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10195 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10196 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10197 IEM_MC_REF_EFLAGS(pEFlags);
10198 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10199
10200 IEM_MC_ADVANCE_RIP_AND_FINISH();
10201 IEM_MC_END();
10202 break;
10203
10204 case IEMMODE_32BIT:
10205 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10207 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10208 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10209 IEM_MC_ARG(uint32_t, u32Src, 2);
10210 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10211
10212 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10213 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10214 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10215 IEM_MC_REF_EFLAGS(pEFlags);
10216 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10217
10218 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10219 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10220 } IEM_MC_ELSE() {
10221 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10222 } IEM_MC_ENDIF();
10223
10224 IEM_MC_ADVANCE_RIP_AND_FINISH();
10225 IEM_MC_END();
10226 break;
10227
10228 case IEMMODE_64BIT:
10229 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10231 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10232 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10233 IEM_MC_ARG(uint64_t, u64Src, 2);
10234 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10235
10236 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10237 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10238 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10239 IEM_MC_REF_EFLAGS(pEFlags);
10240 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10241
10242 IEM_MC_ADVANCE_RIP_AND_FINISH();
10243 IEM_MC_END();
10244 break;
10245
10246 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10247 }
10248 }
10249 else
10250 {
10251#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10252 do { \
10253 switch (pVCpu->iem.s.enmEffOpSize) \
10254 { \
10255 case IEMMODE_16BIT: \
10256 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10257 \
10258 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10261 IEMOP_HLP_DONE_DECODING(); \
10262 \
10263 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10264 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10265 \
10266 IEM_MC_ARG(uint16_t, u16Src, 2); \
10267 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10268 \
10269 IEM_MC_LOCAL(uint16_t, u16Ax); \
10270 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10271 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10272 \
10273 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10274 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10275 \
10276 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10277 IEM_MC_COMMIT_EFLAGS(EFlags); \
10278 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10279 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10280 IEM_MC_END(); \
10281 break; \
10282 \
10283 case IEMMODE_32BIT: \
10284 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10287 IEMOP_HLP_DONE_DECODING(); \
10288 \
10289 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10290 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10291 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10292 \
10293 IEM_MC_ARG(uint32_t, u32Src, 2); \
10294 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10295 \
10296 IEM_MC_LOCAL(uint32_t, u32Eax); \
10297 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10298 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10299 \
10300 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10301 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10302 \
10303 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10304 IEM_MC_COMMIT_EFLAGS(EFlags); \
10305 \
10306 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10307 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10308 } IEM_MC_ENDIF(); \
10309 \
10310 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10311 IEM_MC_END(); \
10312 break; \
10313 \
10314 case IEMMODE_64BIT: \
10315 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10318 IEMOP_HLP_DONE_DECODING(); \
10319 \
10320 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10321 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10322 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10323 \
10324 IEM_MC_ARG(uint64_t, u64Src, 2); \
10325 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10326 \
10327 IEM_MC_LOCAL(uint64_t, u64Rax); \
10328 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10329 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10330 \
10331 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10332 \
10333 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10334 \
10335 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10336 IEM_MC_COMMIT_EFLAGS(EFlags); \
10337 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10338 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10339 IEM_MC_END(); \
10340 break; \
10341 \
10342 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10343 } \
10344 } while (0)
10345
10346 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10347 {
10348 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10349 }
10350 else
10351 {
10352 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10353 }
10354 }
10355}
10356
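/*
 * Note: the ZF-conditional high-dword handling in the 32-bit cases above is
 * deliberate.  Architecturally a 32-bit GPR write zero-extends into bits
 * 63:32, but the cmpxchg worker writes through plain 32-bit references that
 * leave the high half untouched, so the microcode must zero the high half of
 * exactly the register the instruction wrote: the destination on a match
 * (ZF=1), (R/E)AX on a mismatch.  In effect (illustrative pseudo-state, not
 * real IEM code):
 * @code
 *  if (fZf)
 *      uDstReg = (uint32_t)u32Src;     // matched: dest written, high half cleared
 *  else
 *      uRax    = (uint32_t)u32OldDst;  // mismatch: eax written, high half cleared
 * @endcode
 */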
10357
10358/** Opcode 0x0f 0xb2. */
10359FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10360{
10361 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10362 IEMOP_HLP_MIN_386();
10363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10364 if (IEM_IS_MODRM_REG_MODE(bRm))
10365 IEMOP_RAISE_INVALID_OPCODE_RET();
10366 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10367}
10368
10369
10370/**
10371 * @opcode 0xb3
10372 * @oppfx n/a
10373 * @opflclass bitmap
10374 */
10375FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10376{
10377 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10378 IEMOP_HLP_MIN_386();
10379 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10380 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10381}
10382
10383
10384/** Opcode 0x0f 0xb4. */
10385FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10386{
10387 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10388 IEMOP_HLP_MIN_386();
10389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10390 if (IEM_IS_MODRM_REG_MODE(bRm))
10391 IEMOP_RAISE_INVALID_OPCODE_RET();
10392 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10393}
10394
10395
10396/** Opcode 0x0f 0xb5. */
10397FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10398{
10399 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10400 IEMOP_HLP_MIN_386();
10401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10402 if (IEM_IS_MODRM_REG_MODE(bRm))
10403 IEMOP_RAISE_INVALID_OPCODE_RET();
10404 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10405}
10406
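/*
 * For reference, all three of lss/lfs/lgs load a little-endian far pointer
 * from memory via iemOpCommonLoadSRegAndGreg.  A sketch for the 32-bit
 * operand size, where fetch_u32/fetch_u16 merely stand in for the guest
 * memory fetches and the selector load includes the usual segment checks:
 *
 *      uint32_t const offSeg = fetch_u32(GCPtrEff);        // offset   -> Gv
 *      uint16_t const uSel   = fetch_u16(GCPtrEff + 4);    // selector -> SS/FS/GS
 */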
10407
10408/** Opcode 0x0f 0xb6. */
10409FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10410{
10411 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10412 IEMOP_HLP_MIN_386();
10413
10414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10415
10416 /*
10417 * If rm is denoting a register, no more instruction bytes.
10418 */
10419 if (IEM_IS_MODRM_REG_MODE(bRm))
10420 {
10421 switch (pVCpu->iem.s.enmEffOpSize)
10422 {
10423 case IEMMODE_16BIT:
10424 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10426 IEM_MC_LOCAL(uint16_t, u16Value);
10427 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10428 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10429 IEM_MC_ADVANCE_RIP_AND_FINISH();
10430 IEM_MC_END();
10431 break;
10432
10433 case IEMMODE_32BIT:
10434 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10436 IEM_MC_LOCAL(uint32_t, u32Value);
10437 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10438 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10439 IEM_MC_ADVANCE_RIP_AND_FINISH();
10440 IEM_MC_END();
10441 break;
10442
10443 case IEMMODE_64BIT:
10444 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10446 IEM_MC_LOCAL(uint64_t, u64Value);
10447 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10448 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10449 IEM_MC_ADVANCE_RIP_AND_FINISH();
10450 IEM_MC_END();
10451 break;
10452
10453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10454 }
10455 }
10456 else
10457 {
10458 /*
10459 * We're loading a register from memory.
10460 */
10461 switch (pVCpu->iem.s.enmEffOpSize)
10462 {
10463 case IEMMODE_16BIT:
10464 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10465 IEM_MC_LOCAL(uint16_t, u16Value);
10466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10469 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10470 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10471 IEM_MC_ADVANCE_RIP_AND_FINISH();
10472 IEM_MC_END();
10473 break;
10474
10475 case IEMMODE_32BIT:
10476 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10477 IEM_MC_LOCAL(uint32_t, u32Value);
10478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10481 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10482 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10483 IEM_MC_ADVANCE_RIP_AND_FINISH();
10484 IEM_MC_END();
10485 break;
10486
10487 case IEMMODE_64BIT:
10488 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10489 IEM_MC_LOCAL(uint64_t, u64Value);
10490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10493 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10494 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10495 IEM_MC_ADVANCE_RIP_AND_FINISH();
10496 IEM_MC_END();
10497 break;
10498
10499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10500 }
10501 }
10502}
10503
10504
10505/** Opcode 0x0f 0xb7. */
10506FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10507{
10508 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10509 IEMOP_HLP_MIN_386();
10510
10511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10512
10513 /** @todo Not entirely sure how the operand size prefix is handled here,
10514 * assuming that it will be ignored. Would be nice to have a few
10515 * tests for this. */
10516
10517 /** @todo There should be no difference in the behaviour whether REX.W is
10518 * present or not... */
10519
10520 /*
10521 * If rm is denoting a register, no more instruction bytes.
10522 */
10523 if (IEM_IS_MODRM_REG_MODE(bRm))
10524 {
10525 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10526 {
10527 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10529 IEM_MC_LOCAL(uint32_t, u32Value);
10530 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10531 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10532 IEM_MC_ADVANCE_RIP_AND_FINISH();
10533 IEM_MC_END();
10534 }
10535 else
10536 {
10537 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10539 IEM_MC_LOCAL(uint64_t, u64Value);
10540 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10541 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10542 IEM_MC_ADVANCE_RIP_AND_FINISH();
10543 IEM_MC_END();
10544 }
10545 }
10546 else
10547 {
10548 /*
10549 * We're loading a register from memory.
10550 */
10551 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10552 {
10553 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10554 IEM_MC_LOCAL(uint32_t, u32Value);
10555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10558 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10559 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10560 IEM_MC_ADVANCE_RIP_AND_FINISH();
10561 IEM_MC_END();
10562 }
10563 else
10564 {
10565 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10566 IEM_MC_LOCAL(uint64_t, u64Value);
10567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10570 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10571 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10572 IEM_MC_ADVANCE_RIP_AND_FINISH();
10573 IEM_MC_END();
10574 }
10575 }
10576}
10577
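/*
 * For reference, the zero extension done by the movzx forms above in plain C
 * (a sketch; a 32-bit general register write also clears the high dword, so
 * the 32-bit forms effectively zero the whole 64-bit register):
 *
 *      uint32_t const u32Dst = (uint8_t)uSrc;      // movzx Gv,Eb
 *      uint64_t const u64Dst = (uint16_t)uSrc;     // movzx Gv,Ew with REX.W
 */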
10578
10579/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10580FNIEMOP_UD_STUB(iemOp_jmpe);
10581
10582
10583/**
10584 * @opcode 0xb8
10585 * @oppfx 0xf3
10586 * @opflmodify cf,pf,af,zf,sf,of
10587 * @opflclear cf,pf,af,sf,of
10588 */
10589FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10590{
10591 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10592 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10593 return iemOp_InvalidNeedRM(pVCpu);
10594#ifndef TST_IEM_CHECK_MC
10595# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10596 static const IEMOPBINSIZES s_Native =
10597 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10598# endif
10599 static const IEMOPBINSIZES s_Fallback =
10600 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10601#endif
10602 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10603 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10604 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10605}
10606
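/*
 * For reference, a plain-C sketch of the POPCNT semantics provided by the
 * workers selected above (all affected flags except ZF are cleared, as the
 * doc block notes):
 *
 *      uint64_t uVal  = uSrc;
 *      unsigned cBits = 0;
 *      while (uVal)
 *      {
 *          cBits += uVal & 1;
 *          uVal >>= 1;
 *      }
 *      *puDst   = cBits;
 *      fEFlags &= ~(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF);
 *      if (!uSrc)
 *          fEFlags |= X86_EFL_ZF;
 */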
10607
10608/**
10609 * @opcode 0xb9
10610 * @opinvalid intel-modrm
10611 * @optest ->
10612 */
10613FNIEMOP_DEF(iemOp_Grp10)
10614{
10615 /*
10616 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
10617 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10618 */
10619 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10620 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10621 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10622}
10623
10624
10625/**
10626 * Body for group 8 bit instruction.
10627 */
10628#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10629 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10630 \
10631 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10632 { \
10633 /* register destination. */ \
10634 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10635 \
10636 switch (pVCpu->iem.s.enmEffOpSize) \
10637 { \
10638 case IEMMODE_16BIT: \
10639 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10641 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10642 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10643 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10644 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10645 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10646 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10647 \
10648 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10649 IEM_MC_END(); \
10650 break; \
10651 \
10652 case IEMMODE_32BIT: \
10653 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10655 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10656 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10657 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10658 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10659 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10660 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10661 \
10662 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10663 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10664 IEM_MC_END(); \
10665 break; \
10666 \
10667 case IEMMODE_64BIT: \
10668 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10670 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10671 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10672 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10673 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10674 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10675 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10676 \
10677 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10678 IEM_MC_END(); \
10679 break; \
10680 \
10681 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10682 } \
10683 } \
10684 else \
10685 { \
10686 /* memory destination. */ \
10687 /** @todo test negative bit offsets! */ \
10688 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10689 { \
10690 switch (pVCpu->iem.s.enmEffOpSize) \
10691 { \
10692 case IEMMODE_16BIT: \
10693 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10696 \
10697 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10698 IEMOP_HLP_DONE_DECODING(); \
10699 \
10700 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10701 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10702 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10703 \
10704 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10705 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10706 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10707 \
10708 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10709 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10710 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10711 IEM_MC_END(); \
10712 break; \
10713 \
10714 case IEMMODE_32BIT: \
10715 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10718 \
10719 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10720 IEMOP_HLP_DONE_DECODING(); \
10721 \
10722 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10723 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10724 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10725 \
10726 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10727 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10728 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10729 \
10730 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10731 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10732 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10733 IEM_MC_END(); \
10734 break; \
10735 \
10736 case IEMMODE_64BIT: \
10737 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10740 \
10741 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10742 IEMOP_HLP_DONE_DECODING(); \
10743 \
10744 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10745 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10746 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10747 \
10748 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10749 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10750 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10751 \
10752 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10753 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10754 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10755 IEM_MC_END(); \
10756 break; \
10757 \
10758 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10759 } \
10760 } \
10761 else \
10762 { \
10763 (void)0
10764/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10765#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10766 switch (pVCpu->iem.s.enmEffOpSize) \
10767 { \
10768 case IEMMODE_16BIT: \
10769 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10772 \
10773 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10774 IEMOP_HLP_DONE_DECODING(); \
10775 \
10776 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10777 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10778 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10779 \
10780 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10781 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10782 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
10783 \
10784 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10785 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10786 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10787 IEM_MC_END(); \
10788 break; \
10789 \
10790 case IEMMODE_32BIT: \
10791 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10794 \
10795 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10796 IEMOP_HLP_DONE_DECODING(); \
10797 \
10798 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10799 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10800 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10801 \
10802 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10803 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10804 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
10805 \
10806 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10807 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10808 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10809 IEM_MC_END(); \
10810 break; \
10811 \
10812 case IEMMODE_64BIT: \
10813 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10816 \
10817 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10818 IEMOP_HLP_DONE_DECODING(); \
10819 \
10820 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10821 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10822 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10823 \
10824 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10825 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10826 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
10827 \
10828 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10829 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10830 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10831 IEM_MC_END(); \
10832 break; \
10833 \
10834 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10835 } \
10836 } \
10837 } \
10838 (void)0
10839
10840/* Read-only version (bt) */
10841#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10842 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10843 \
10844 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10845 { \
10846 /* register destination. */ \
10847 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10848 \
10849 switch (pVCpu->iem.s.enmEffOpSize) \
10850 { \
10851 case IEMMODE_16BIT: \
10852 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10854 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10855 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10856 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10857 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10858 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10859 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10860 \
10861 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10862 IEM_MC_END(); \
10863 break; \
10864 \
10865 case IEMMODE_32BIT: \
10866 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10868 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10869 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10870 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10871 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10872 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10873 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10874 \
10875 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10876 IEM_MC_END(); \
10877 break; \
10878 \
10879 case IEMMODE_64BIT: \
10880 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10882 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10883 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10884 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10885 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10886 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10887 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10888 \
10889 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10890 IEM_MC_END(); \
10891 break; \
10892 \
10893 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10894 } \
10895 } \
10896 else \
10897 { \
10898 /* memory destination. */ \
10899 /** @todo test negative bit offsets! */ \
10900 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10901 { \
10902 switch (pVCpu->iem.s.enmEffOpSize) \
10903 { \
10904 case IEMMODE_16BIT: \
10905 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10908 \
10909 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10910 IEMOP_HLP_DONE_DECODING(); \
10911 \
10912 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10913 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10914 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10915 \
10916 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10917 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10918 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10919 \
10920 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10921 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10922 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10923 IEM_MC_END(); \
10924 break; \
10925 \
10926 case IEMMODE_32BIT: \
10927 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10930 \
10931 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10932 IEMOP_HLP_DONE_DECODING(); \
10933 \
10934 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10935 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10936 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10937 \
10938 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10939 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10940 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10941 \
10942 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10943 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10944 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10945 IEM_MC_END(); \
10946 break; \
10947 \
10948 case IEMMODE_64BIT: \
10949 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10952 \
10953 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10954 IEMOP_HLP_DONE_DECODING(); \
10955 \
10956 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10957 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10958 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10959 \
10960 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10961 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10962 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10963 \
10964 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10965 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10966 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10967 IEM_MC_END(); \
10968 break; \
10969 \
10970 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10971 } \
10972 } \
10973 else \
10974 { \
10975 IEMOP_HLP_DONE_DECODING(); \
10976 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
10977 } \
10978 } \
10979 (void)0
10980
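/*
 * For reference, what the bt/bts/btr/btc workers used by the bodies above
 * compute, as a plain-C sketch of the 16-bit immediate form (the immediate
 * is masked to the operand width, so no negative offsets can occur here):
 *
 *      uint16_t const fMask = (uint16_t)1 << (bImm & 15);
 *      fEFlags = (fEFlags & ~X86_EFL_CF) | (*pu16Dst & fMask ? X86_EFL_CF : 0);
 *      *pu16Dst |=  fMask;     // bts
 *      *pu16Dst &= ~fMask;     // btr
 *      *pu16Dst ^=  fMask;     // btc   (bt leaves the destination untouched)
 */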
10981
10982/**
10983 * @opmaps grp8
10984 * @opcode /4
10985 * @oppfx n/a
10986 * @opflclass bitmap
10987 */
10988FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
10989{
10990 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
10991 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
10992}
10993
10994
10995/**
10996 * @opmaps grp8
10997 * @opcode /5
10998 * @oppfx n/a
10999 * @opflclass bitmap
11000 */
11001FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11002{
11003 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11004 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11005 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11006}
11007
11008
11009/**
11010 * @opmaps grp8
11011 * @opcode /6
11012 * @oppfx n/a
11013 * @opflclass bitmap
11014 */
11015FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11016{
11017 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11018 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11019 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11020}
11021
11022
11023/**
11024 * @opmaps grp8
11025 * @opcode /7
11026 * @oppfx n/a
11027 * @opflclass bitmap
11028 */
11029FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11030{
11031 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11032 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11033 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11034}
11035
11036
11037/** Opcode 0x0f 0xba. */
11038FNIEMOP_DEF(iemOp_Grp8)
11039{
11040 IEMOP_HLP_MIN_386();
11041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11042 switch (IEM_GET_MODRM_REG_8(bRm))
11043 {
11044 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11045 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11046 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11047 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11048
11049 case 0: case 1: case 2: case 3:
11050 /* Both AMD and Intel want full modr/m decoding and imm8. */
11051 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11052
11053 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11054 }
11055}
11056
11057
11058/**
11059 * @opcode 0xbb
11060 * @oppfx n/a
11061 * @opflclass bitmap
11062 */
11063FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11064{
11065 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11066 IEMOP_HLP_MIN_386();
11067 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11068 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11069}
11070
11071
11072/**
11073 * Body for BSF and BSR instructions.
11074 *
11075 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11076 * the destination register, which means that for 32-bit operations the high
11077 * bits must be left alone.
11078 *
11079 * @param pImpl Pointer to the instruction implementation (assembly).
11080 */
11081#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11083 \
11084 /* \
11085 * If rm is denoting a register, no more instruction bytes. \
11086 */ \
11087 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11088 { \
11089 switch (pVCpu->iem.s.enmEffOpSize) \
11090 { \
11091 case IEMMODE_16BIT: \
11092 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11094 \
11095 IEM_MC_ARG(uint16_t, u16Src, 2); \
11096 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11097 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11098 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11099 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11100 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11101 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11102 \
11103 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11104 IEM_MC_END(); \
11105 break; \
11106 \
11107 case IEMMODE_32BIT: \
11108 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11110 \
11111 IEM_MC_ARG(uint32_t, u32Src, 2); \
11112 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11113 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11114 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11115 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11116 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11117 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11118 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11119 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11120 } IEM_MC_ENDIF(); \
11121 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11122 IEM_MC_END(); \
11123 break; \
11124 \
11125 case IEMMODE_64BIT: \
11126 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11128 \
11129 IEM_MC_ARG(uint64_t, u64Src, 2); \
11130 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11131 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11132 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11133 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11134 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11135 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11136 \
11137 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11138 IEM_MC_END(); \
11139 break; \
11140 \
11141 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11142 } \
11143 } \
11144 else \
11145 { \
11146 /* \
11147 * We're accessing memory. \
11148 */ \
11149 switch (pVCpu->iem.s.enmEffOpSize) \
11150 { \
11151 case IEMMODE_16BIT: \
11152 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11156 \
11157 IEM_MC_ARG(uint16_t, u16Src, 2); \
11158 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11159 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11160 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11161 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11162 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11163 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11164 \
11165 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11166 IEM_MC_END(); \
11167 break; \
11168 \
11169 case IEMMODE_32BIT: \
11170 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11174 \
11175 IEM_MC_ARG(uint32_t, u32Src, 2); \
11176 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11177 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11178 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11179 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11180 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11181 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11182 \
11183 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11184 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11185 } IEM_MC_ENDIF(); \
11186 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11187 IEM_MC_END(); \
11188 break; \
11189 \
11190 case IEMMODE_64BIT: \
11191 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11195 \
11196 IEM_MC_ARG(uint64_t, u64Src, 2); \
11197 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11198 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11199 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11200 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11201 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11202 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11203 \
11204 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11205 IEM_MC_END(); \
11206 break; \
11207 \
11208 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11209 } \
11210 } (void)0
11211
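/*
 * For reference, a plain-C sketch of the BSF behaviour the body above relies
 * on (BSR is the same except the scan runs down from the most significant
 * bit):
 *
 *      if (uSrc)
 *      {
 *          fEFlags &= ~X86_EFL_ZF;
 *          unsigned iBit = 0;
 *          while (!(uSrc & ((uint64_t)1 << iBit)))
 *              iBit++;
 *          *puDst = iBit;
 *      }
 *      else
 *          fEFlags |= X86_EFL_ZF;      // destination left unchanged, hence
 *                                      // the conditional high-dword clearing
 */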
11212
11213/**
11214 * @opcode 0xbc
11215 * @oppfx !0xf3
11216 * @opfltest cf,pf,af,sf,of
11217 * @opflmodify cf,pf,af,zf,sf,of
11218 * @opflundef cf,pf,af,sf,of
11219 * @todo AMD doesn't modify cf,pf,af,sf&of but since Intel does, we're forced to
11220 * document them as inputs. Sigh.
11221 */
11222FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11223{
11224 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11225 IEMOP_HLP_MIN_386();
11226 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11227 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11228 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11229}
11230
11231
11232/**
11233 * @opcode 0xbc
11234 * @oppfx 0xf3
11235 * @opfltest pf,af,sf,of
11236 * @opflmodify cf,pf,af,zf,sf,of
11237 * @opflundef pf,af,sf,of
11238 * @todo AMD doesn't modify pf,af,sf&of but since Intel does, we're forced to
11239 * document them as inputs. Sigh.
11240 */
11241FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11242{
11243 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11244 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11245 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11246
11247#ifndef TST_IEM_CHECK_MC
11248 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11249 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11250 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11251 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11252 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11253 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11254 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11255 {
11256 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11257 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11258 };
11259#endif
11260 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11261 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11262 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11264 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11265}
11266
11267
11268/**
11269 * @opcode 0xbd
11270 * @oppfx !0xf3
11271 * @opfltest cf,pf,af,sf,of
11272 * @opflmodify cf,pf,af,zf,sf,of
11273 * @opflundef cf,pf,af,sf,of
11274 * @todo AMD doesn't modify cf,pf,af,sf&of but since Intel does, we're forced to
11275 * document them as inputs. Sigh.
11276 */
11277FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11278{
11279 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11280 IEMOP_HLP_MIN_386();
11281 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11282 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11283 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11284}
11285
11286
11287/**
11288 * @opcode 0xbd
11289 * @oppfx 0xf3
11290 * @opfltest pf,af,sf,of
11291 * @opflmodify cf,pf,af,zf,sf,of
11292 * @opflundef pf,af,sf,of
11293 * @todo AMD doesn't modify pf,af,sf&of but since Intel does, we're forced to
11294 * document them as inputs. Sigh.
11295 */
11296FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11297{
11298 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11299 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11300 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11301
11302#ifndef TST_IEM_CHECK_MC
11303 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11304 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11305 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11306 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11307 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11308 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11309 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11310 {
11311 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11312 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11313 };
11314#endif
11315 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11316 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11317 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11319 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11320}
11321
11322
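/*
 * For reference, a plain-C sketch of TZCNT (LZCNT is the mirror image,
 * counting zero bits down from the most significant end).  Unlike bsf/bsr,
 * a zero input yields the operand width, and only CF and ZF are defined
 * (the remaining flags are undefined, see the @todo notes above):
 *
 *      unsigned cZeros = 0;
 *      while (cZeros < 64 && !(uSrc & ((uint64_t)1 << cZeros)))
 *          cZeros++;
 *      *puDst  = cZeros;
 *      fEFlags = (fEFlags & ~(X86_EFL_CF | X86_EFL_ZF))
 *              | (uSrc   == 0 ? X86_EFL_CF : 0)
 *              | (cZeros == 0 ? X86_EFL_ZF : 0);
 */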
11323
11324/** Opcode 0x0f 0xbe. */
11325FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11326{
11327 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11328 IEMOP_HLP_MIN_386();
11329
11330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11331
11332 /*
11333 * If rm is denoting a register, no more instruction bytes.
11334 */
11335 if (IEM_IS_MODRM_REG_MODE(bRm))
11336 {
11337 switch (pVCpu->iem.s.enmEffOpSize)
11338 {
11339 case IEMMODE_16BIT:
11340 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11342 IEM_MC_LOCAL(uint16_t, u16Value);
11343 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11344 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11345 IEM_MC_ADVANCE_RIP_AND_FINISH();
11346 IEM_MC_END();
11347 break;
11348
11349 case IEMMODE_32BIT:
11350 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11352 IEM_MC_LOCAL(uint32_t, u32Value);
11353 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11354 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11355 IEM_MC_ADVANCE_RIP_AND_FINISH();
11356 IEM_MC_END();
11357 break;
11358
11359 case IEMMODE_64BIT:
11360 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11362 IEM_MC_LOCAL(uint64_t, u64Value);
11363 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11364 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11365 IEM_MC_ADVANCE_RIP_AND_FINISH();
11366 IEM_MC_END();
11367 break;
11368
11369 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11370 }
11371 }
11372 else
11373 {
11374 /*
11375 * We're loading a register from memory.
11376 */
11377 switch (pVCpu->iem.s.enmEffOpSize)
11378 {
11379 case IEMMODE_16BIT:
11380 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11381 IEM_MC_LOCAL(uint16_t, u16Value);
11382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11385 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11386 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11387 IEM_MC_ADVANCE_RIP_AND_FINISH();
11388 IEM_MC_END();
11389 break;
11390
11391 case IEMMODE_32BIT:
11392 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11393 IEM_MC_LOCAL(uint32_t, u32Value);
11394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11397 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11398 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11399 IEM_MC_ADVANCE_RIP_AND_FINISH();
11400 IEM_MC_END();
11401 break;
11402
11403 case IEMMODE_64BIT:
11404 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11405 IEM_MC_LOCAL(uint64_t, u64Value);
11406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11409 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11410 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11411 IEM_MC_ADVANCE_RIP_AND_FINISH();
11412 IEM_MC_END();
11413 break;
11414
11415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11416 }
11417 }
11418}
11419
11420
11421/** Opcode 0x0f 0xbf. */
11422FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11423{
11424 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11425 IEMOP_HLP_MIN_386();
11426
11427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11428
11429 /** @todo Not entirely sure how the operand size prefix is handled here,
11430 * assuming that it will be ignored. Would be nice to have a few
11431 * tests for this. */
11432 /*
11433 * If rm is denoting a register, no more instruction bytes.
11434 */
11435 if (IEM_IS_MODRM_REG_MODE(bRm))
11436 {
11437 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11438 {
11439 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11441 IEM_MC_LOCAL(uint32_t, u32Value);
11442 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11443 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11444 IEM_MC_ADVANCE_RIP_AND_FINISH();
11445 IEM_MC_END();
11446 }
11447 else
11448 {
11449 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11451 IEM_MC_LOCAL(uint64_t, u64Value);
11452 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11453 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11454 IEM_MC_ADVANCE_RIP_AND_FINISH();
11455 IEM_MC_END();
11456 }
11457 }
11458 else
11459 {
11460 /*
11461 * We're loading a register from memory.
11462 */
11463 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11464 {
11465 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11466 IEM_MC_LOCAL(uint32_t, u32Value);
11467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11470 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11471 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11472 IEM_MC_ADVANCE_RIP_AND_FINISH();
11473 IEM_MC_END();
11474 }
11475 else
11476 {
11477 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11478 IEM_MC_LOCAL(uint64_t, u64Value);
11479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11482 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11483 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11484 IEM_MC_ADVANCE_RIP_AND_FINISH();
11485 IEM_MC_END();
11486 }
11487 }
11488}
11489
11490
11491/**
11492 * @opcode 0xc0
11493 * @opflclass arithmetic
11494 */
11495FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11496{
11497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11498 IEMOP_HLP_MIN_486();
11499 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11500
11501 /*
11502 * If rm is denoting a register, no more instruction bytes.
11503 */
11504 if (IEM_IS_MODRM_REG_MODE(bRm))
11505 {
11506 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11508 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11509 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11510 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11511
11512 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11513 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11514 IEM_MC_REF_EFLAGS(pEFlags);
11515 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11516
11517 IEM_MC_ADVANCE_RIP_AND_FINISH();
11518 IEM_MC_END();
11519 }
11520 else
11521 {
11522 /*
11523 * We're accessing memory.
11524 */
11525#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11526 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11529 IEMOP_HLP_DONE_DECODING(); \
11530 \
11531 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11532 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11533 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11534 \
11535 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11536 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11537 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11538 \
11539 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11540 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11541 \
11542 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11543 IEM_MC_COMMIT_EFLAGS(EFlags); \
11544 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11545 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11546 IEM_MC_END()
11547 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11548 {
11549 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11550 }
11551 else
11552 {
11553 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11554 }
11555 }
11556}
11557
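/*
 * For reference, a plain-C sketch of XADD as implemented by the workers
 * above (the u8RegCopy local in the memory variant exists so that the guest
 * register is only updated once the possibly-faulting memory access has been
 * committed):
 *
 *      uint8_t const uTmp = *pu8Dst;
 *      *pu8Dst = uTmp + *pu8Reg;   // EFLAGS are updated as for ADD
 *      *pu8Reg = uTmp;             // the source register gets the old value
 */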
11558
11559/**
11560 * @opcode 0xc1
11561 * @opflclass arithmetic
11562 */
11563FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11564{
11565 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11566 IEMOP_HLP_MIN_486();
11567 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11568
11569 /*
11570 * If rm is denoting a register, no more instruction bytes.
11571 */
11572 if (IEM_IS_MODRM_REG_MODE(bRm))
11573 {
11574 switch (pVCpu->iem.s.enmEffOpSize)
11575 {
11576 case IEMMODE_16BIT:
11577 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11579 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11580 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11581 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11582
11583 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11584 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11585 IEM_MC_REF_EFLAGS(pEFlags);
11586 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11587
11588 IEM_MC_ADVANCE_RIP_AND_FINISH();
11589 IEM_MC_END();
11590 break;
11591
11592 case IEMMODE_32BIT:
11593 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11595 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11596 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11597 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11598
11599 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11600 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11601 IEM_MC_REF_EFLAGS(pEFlags);
11602 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11603
11604 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11605 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11606 IEM_MC_ADVANCE_RIP_AND_FINISH();
11607 IEM_MC_END();
11608 break;
11609
11610 case IEMMODE_64BIT:
11611 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11613 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11614 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11615 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11616
11617 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11618 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11619 IEM_MC_REF_EFLAGS(pEFlags);
11620 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11621
11622 IEM_MC_ADVANCE_RIP_AND_FINISH();
11623 IEM_MC_END();
11624 break;
11625
11626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11627 }
11628 }
11629 else
11630 {
11631 /*
11632 * We're accessing memory.
11633 */
11634#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11635 do { \
11636 switch (pVCpu->iem.s.enmEffOpSize) \
11637 { \
11638 case IEMMODE_16BIT: \
11639 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11642 IEMOP_HLP_DONE_DECODING(); \
11643 \
11644 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11645 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11646 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11647 \
11648 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11649 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11650 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11651 \
11652 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11653 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11654 \
11655 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11656 IEM_MC_COMMIT_EFLAGS(EFlags); \
11657 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11658 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11659 IEM_MC_END(); \
11660 break; \
11661 \
11662 case IEMMODE_32BIT: \
11663 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11666 IEMOP_HLP_DONE_DECODING(); \
11667 \
11668 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11669 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11670 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11671 \
11672 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11673 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11674 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11675 \
11676 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11677 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11678 \
11679 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11680 IEM_MC_COMMIT_EFLAGS(EFlags); \
11681 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11682 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11683 IEM_MC_END(); \
11684 break; \
11685 \
11686 case IEMMODE_64BIT: \
11687 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11690 IEMOP_HLP_DONE_DECODING(); \
11691 \
11692 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11693 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11694 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11695 \
11696 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11697 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11698 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11699 \
11700 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11701 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11702 \
11703 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11704 IEM_MC_COMMIT_EFLAGS(EFlags); \
11705 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11706 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11707 IEM_MC_END(); \
11708 break; \
11709 \
11710 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11711 } \
11712 } while (0)
11713
11714 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11715 {
11716 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
11717 }
11718 else
11719 {
11720 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
11721 }
11722 }
11723}
11724
11725
11726/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11727FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11728{
11729 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11730
11731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11732 if (IEM_IS_MODRM_REG_MODE(bRm))
11733 {
11734 /*
11735 * XMM, XMM.
11736 */
11737 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11738 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11740 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11741 IEM_MC_LOCAL(X86XMMREG, Dst);
11742 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11743 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11744 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11745 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11746 IEM_MC_PREPARE_SSE_USAGE();
11747 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11748 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11749 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11750
11751 IEM_MC_ADVANCE_RIP_AND_FINISH();
11752 IEM_MC_END();
11753 }
11754 else
11755 {
11756 /*
11757 * XMM, [mem128].
11758 */
11759 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11760 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11761 IEM_MC_LOCAL(X86XMMREG, Dst);
11762 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11763 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11765
11766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11767 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11768 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11770 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11771 IEM_MC_PREPARE_SSE_USAGE();
11772
11773 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11774 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11775 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11776
11777 IEM_MC_ADVANCE_RIP_AND_FINISH();
11778 IEM_MC_END();
11779 }
11780}
11781
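/*
 * For reference, a plain-C sketch of a single cmpps lane, ignoring MXCSR
 * exception flags and denormal handling.  The immediate selects the
 * predicate (0=EQ, 1=LT, 2=LE, 3=UNORD, 4=NEQ, 5=NLT, 6=NLE, 7=ORD) and each
 * lane is turned into an all-ones or all-zeroes mask:
 *
 *      bool fMatch;
 *      switch (bImm & 7)
 *      {
 *          case 0: fMatch =   r32Dst == r32Src;  break;  // false if either is NaN
 *          case 1: fMatch =   r32Dst <  r32Src;  break;
 *          case 2: fMatch =   r32Dst <= r32Src;  break;
 *          case 3: fMatch =   isnan(r32Dst) || isnan(r32Src); break;
 *          case 4: fMatch = !(r32Dst == r32Src); break;  // true if either is NaN
 *          case 5: fMatch = !(r32Dst <  r32Src); break;
 *          case 6: fMatch = !(r32Dst <= r32Src); break;
 *          case 7: fMatch = !isnan(r32Dst) && !isnan(r32Src); break;
 *      }
 *      uDstLane = fMatch ? UINT32_MAX : 0;
 */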
11782
11783/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11784FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11785{
11786 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11787
11788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11789 if (IEM_IS_MODRM_REG_MODE(bRm))
11790 {
11791 /*
11792 * XMM, XMM.
11793 */
11794 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11795 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11797 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11798 IEM_MC_LOCAL(X86XMMREG, Dst);
11799 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11800 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11801 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11802 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11803 IEM_MC_PREPARE_SSE_USAGE();
11804 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11805 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11806 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11807
11808 IEM_MC_ADVANCE_RIP_AND_FINISH();
11809 IEM_MC_END();
11810 }
11811 else
11812 {
11813 /*
11814 * XMM, [mem128].
11815 */
11816 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11817 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11818 IEM_MC_LOCAL(X86XMMREG, Dst);
11819 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11820 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11822
11823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11824 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11825 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11827 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11828 IEM_MC_PREPARE_SSE_USAGE();
11829
11830 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11831 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11832 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11833
11834 IEM_MC_ADVANCE_RIP_AND_FINISH();
11835 IEM_MC_END();
11836 }
11837}
11838
11839
11840/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11841FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11842{
11843 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11844
11845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11846 if (IEM_IS_MODRM_REG_MODE(bRm))
11847 {
11848 /*
11849 * XMM32, XMM32.
11850 */
11851 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11852 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11854 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11855 IEM_MC_LOCAL(X86XMMREG, Dst);
11856 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11857 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11858 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11859 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11860 IEM_MC_PREPARE_SSE_USAGE();
11861 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11862 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11863 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11864
11865 IEM_MC_ADVANCE_RIP_AND_FINISH();
11866 IEM_MC_END();
11867 }
11868 else
11869 {
11870 /*
11871 * XMM32, [mem32].
11872 */
11873 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11874 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11875 IEM_MC_LOCAL(X86XMMREG, Dst);
11876 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11877 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11879
11880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11881 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11882 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11884 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11885 IEM_MC_PREPARE_SSE_USAGE();
11886
11887 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11888 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11889 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11890 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11891
11892 IEM_MC_ADVANCE_RIP_AND_FINISH();
11893 IEM_MC_END();
11894 }
11895}
11896
11897
11898/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11899FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11900{
11901 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11902
11903 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11904 if (IEM_IS_MODRM_REG_MODE(bRm))
11905 {
11906 /*
11907 * XMM64, XMM64.
11908 */
11909 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11910 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11912 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11913 IEM_MC_LOCAL(X86XMMREG, Dst);
11914 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11915 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11916 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11917 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11918 IEM_MC_PREPARE_SSE_USAGE();
11919 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11920 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11921 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11922
11923 IEM_MC_ADVANCE_RIP_AND_FINISH();
11924 IEM_MC_END();
11925 }
11926 else
11927 {
11928 /*
11929 * XMM64, [mem64].
11930 */
11931 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11932 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11933 IEM_MC_LOCAL(X86XMMREG, Dst);
11934 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11935 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11937
11938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11939 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11940 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11942 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11943 IEM_MC_PREPARE_SSE_USAGE();
11944
11945 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11946 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11947 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11948 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11949
11950 IEM_MC_ADVANCE_RIP_AND_FINISH();
11951 IEM_MC_END();
11952 }
11953}
11954
11955
11956/** Opcode 0x0f 0xc3. */
11957FNIEMOP_DEF(iemOp_movnti_My_Gy)
11958{
11959 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11960
11961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11962
11963 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11964 if (IEM_IS_MODRM_MEM_MODE(bRm))
11965 {
11966 switch (pVCpu->iem.s.enmEffOpSize)
11967 {
11968 case IEMMODE_32BIT:
11969 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11970 IEM_MC_LOCAL(uint32_t, u32Value);
11971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11972
11973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11975
11976 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11977 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11978 IEM_MC_ADVANCE_RIP_AND_FINISH();
11979 IEM_MC_END();
11980 break;
11981
11982 case IEMMODE_64BIT:
11983 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11984 IEM_MC_LOCAL(uint64_t, u64Value);
11985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11986
11987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11989
11990 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11991 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11992 IEM_MC_ADVANCE_RIP_AND_FINISH();
11993 IEM_MC_END();
11994 break;
11995
11996 case IEMMODE_16BIT:
11997 /** @todo check this form. */
11998 IEMOP_RAISE_INVALID_OPCODE_RET();
11999
12000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12001 }
12002 }
12003 else
12004 IEMOP_RAISE_INVALID_OPCODE_RET();
12005}
12006
12007
12008/* Opcode 0x66 0x0f 0xc3 - invalid */
12009/* Opcode 0xf3 0x0f 0xc3 - invalid */
12010/* Opcode 0xf2 0x0f 0xc3 - invalid */
12011
12012
12013/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12014FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12015{
12016 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
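    /* Only word (bImm & 3) of the destination MMX register is replaced; the
       other three words are preserved, which is why a plain
       IEM_MC_STORE_MREG_U16 suffices below. */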
12017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12018 if (IEM_IS_MODRM_REG_MODE(bRm))
12019 {
12020 /*
12021 * Register, register.
12022 */
12023 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12024 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12025 IEM_MC_LOCAL(uint16_t, uValue);
12026
12027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12028 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12029 IEM_MC_PREPARE_FPU_USAGE();
12030 IEM_MC_FPU_TO_MMX_MODE();
12031
12032 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12033 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12034
12035 IEM_MC_ADVANCE_RIP_AND_FINISH();
12036 IEM_MC_END();
12037 }
12038 else
12039 {
12040 /*
12041 * Register, memory.
12042 */
12043 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12045 IEM_MC_LOCAL(uint16_t, uValue);
12046
12047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12048 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12050 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12051 IEM_MC_PREPARE_FPU_USAGE();
12052
12053 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12054 IEM_MC_FPU_TO_MMX_MODE();
12055 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12056
12057 IEM_MC_ADVANCE_RIP_AND_FINISH();
12058 IEM_MC_END();
12059 }
12060}
12061
12062
12063/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12064FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12065{
12066 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12068 if (IEM_IS_MODRM_REG_MODE(bRm))
12069 {
12070 /*
12071 * Register, register.
12072 */
12073 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12074 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12076
12077 IEM_MC_LOCAL(uint16_t, uValue);
12078 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12079 IEM_MC_PREPARE_SSE_USAGE();
12080
12081 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12082 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12083 IEM_MC_ADVANCE_RIP_AND_FINISH();
12084 IEM_MC_END();
12085 }
12086 else
12087 {
12088 /*
12089 * Register, memory.
12090 */
12091 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12093 IEM_MC_LOCAL(uint16_t, uValue);
12094
12095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12096 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12098 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12099 IEM_MC_PREPARE_SSE_USAGE();
12100
12101 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12102 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12103 IEM_MC_ADVANCE_RIP_AND_FINISH();
12104 IEM_MC_END();
12105 }
12106}
12107
12108
12109/* Opcode 0xf3 0x0f 0xc4 - invalid */
12110/* Opcode 0xf2 0x0f 0xc4 - invalid */
12111
12112
12113/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12114FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12115{
12116 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12118 if (IEM_IS_MODRM_REG_MODE(bRm))
12119 {
12120 /*
12121 * Greg32, MMX, imm8.
12122 */
12123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12124 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12126 IEM_MC_LOCAL(uint16_t, uValue);
12127 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12128 IEM_MC_PREPARE_FPU_USAGE();
12129 IEM_MC_FPU_TO_MMX_MODE();
12130 IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
12131 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12132 IEM_MC_ADVANCE_RIP_AND_FINISH();
12133 IEM_MC_END();
12134 }
12135 /* No memory operand. */
12136 else
12137 IEMOP_RAISE_INVALID_OPCODE_RET();
12138}
12139
12140
12141/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12142FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12143{
12144 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12146 if (IEM_IS_MODRM_REG_MODE(bRm))
12147 {
12148 /*
12149 * Greg32, XMM, imm8.
12150 */
12151 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12152 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12154 IEM_MC_LOCAL(uint16_t, uValue);
12155 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12156 IEM_MC_PREPARE_SSE_USAGE();
12157 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
12158 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12159 IEM_MC_ADVANCE_RIP_AND_FINISH();
12160 IEM_MC_END();
12161 }
12162 /* No memory operand. */
12163 else
12164 IEMOP_RAISE_INVALID_OPCODE_RET();
12165}
12166
12167
12168/* Opcode 0xf3 0x0f 0xc5 - invalid */
12169/* Opcode 0xf2 0x0f 0xc5 - invalid */
12170
12171
12172/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12173FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12174{
12175 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
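    /* The imm8 is four 2-bit selectors: result dwords 0 and 1 are picked from
       the destination, dwords 2 and 3 from the source (per the SSE spec). */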
12176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12177 if (IEM_IS_MODRM_REG_MODE(bRm))
12178 {
12179 /*
12180 * XMM, XMM, imm8.
12181 */
12182 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12183 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12185 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12186 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12187 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12188 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12189 IEM_MC_PREPARE_SSE_USAGE();
12190 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12191 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12192 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12193 IEM_MC_ADVANCE_RIP_AND_FINISH();
12194 IEM_MC_END();
12195 }
12196 else
12197 {
12198 /*
12199 * XMM, [mem128], imm8.
12200 */
12201 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12202 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12203 IEM_MC_LOCAL(RTUINT128U, uSrc);
12204 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12206
12207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12208 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12209 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12211 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12212 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12213
12214 IEM_MC_PREPARE_SSE_USAGE();
12215 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12216 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12217
12218 IEM_MC_ADVANCE_RIP_AND_FINISH();
12219 IEM_MC_END();
12220 }
12221}
12222
12223
12224/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12225FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12226{
12227 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12229 if (IEM_IS_MODRM_REG_MODE(bRm))
12230 {
12231 /*
12232 * XMM, XMM, imm8.
12233 */
12234 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12235 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12237 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12238 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12239 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12240 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12241 IEM_MC_PREPARE_SSE_USAGE();
12242 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12243 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12244 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12245 IEM_MC_ADVANCE_RIP_AND_FINISH();
12246 IEM_MC_END();
12247 }
12248 else
12249 {
12250 /*
12251 * XMM, [mem128], imm8.
12252 */
12253 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12254 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12255 IEM_MC_LOCAL(RTUINT128U, uSrc);
12256 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12258
12259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12260 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12261 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12263 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12264 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12265
12266 IEM_MC_PREPARE_SSE_USAGE();
12267 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12268 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12269
12270 IEM_MC_ADVANCE_RIP_AND_FINISH();
12271 IEM_MC_END();
12272 }
12273}
12274
12275
12276/* Opcode 0xf3 0x0f 0xc6 - invalid */
12277/* Opcode 0xf2 0x0f 0xc6 - invalid */
12278
12279
12280/**
12281 * @opmaps grp9
12282 * @opcode /1
12283 * @opcodesub !11 mr/reg rex.w=0
12284 * @oppfx n/a
12285 * @opflmodify zf
12286 */
12287FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12288{
12289 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
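    /* Operational sketch (the architectural definition, for orientation only):
           if (EDX:EAX == [m64]) { ZF = 1; [m64] = ECX:EBX; }
           else                  { ZF = 0; EDX:EAX = [m64]; }
       The body macro below maps exactly this onto IEM microcode statements. */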
12290#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12291 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
12292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12294 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12295 \
12296 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12297 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12298 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12299 \
12300 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12301 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12302 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12303 \
12304 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12305 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12306 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12307 \
12308 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12309 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12310 \
12311 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12312 IEM_MC_COMMIT_EFLAGS(EFlags); \
12313 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12314 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12315 } IEM_MC_ENDIF(); \
12316 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12317 \
12318 IEM_MC_END()
12319 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12320 {
12321 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12322 }
12323 else
12324 {
12325 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12326 }
12327}
12328
12329
12330/**
12331 * @opmaps grp9
12332 * @opcode /1
12333 * @opcodesub !11 mr/reg rex.w=1
12334 * @oppfx n/a
12335 * @opflmodify zf
12336 */
12337FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12338{
12339 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
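    /* Same pattern as cmpxchg8b, just with RDX:RAX / RCX:RBX and a 16-byte
       operand; unlike the 8-byte form, the effective address must be 16-byte
       aligned or #GP(0) is raised (see IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED
       in the head macro below). */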
12340 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12341 {
12342 /*
12343 * This is hairy, very hairy macro fun. We're walking a fine line
12344 * here to make the code parsable by IEMAllInstPython.py and fit into
12345 * the patterns IEMAllThrdPython.py requires for the code morphing.
12346 */
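/* Each code path below expands BODY_CMPXCHG16B_HEAD, one IEM_MC_CALL_* statement
   and (except for the rendezvous fallbacks, which finish in the cImpl worker)
   BODY_CMPXCHG16B_TAIL, so every path hands the generator scripts a
   self-contained IEM_MC_BEGIN/IEM_MC_END block. */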
12347#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12348 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12351 IEMOP_HLP_DONE_DECODING(); \
12352 \
12353 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12354 bUnmapInfoStmt; \
12355 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12356 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12357 \
12358 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12359 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12360 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12361 \
12362 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12363 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12364 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12365 \
12366 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12367
12368#define BODY_CMPXCHG16B_TAIL(a_Type) \
12369 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12370 IEM_MC_COMMIT_EFLAGS(EFlags); \
12371 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12372 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12373 } IEM_MC_ENDIF(); \
12374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12375 IEM_MC_END()
12376
12377#ifdef RT_ARCH_AMD64
12378 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12379 {
12380 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12381 {
12382 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12383 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12384 BODY_CMPXCHG16B_TAIL(RW);
12385 }
12386 else
12387 {
12388 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12389 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12390 BODY_CMPXCHG16B_TAIL(ATOMIC);
12391 }
12392 }
12393 else
12394 { /* (see comments in #else case below) */
12395 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12396 {
12397 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12398 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12399 BODY_CMPXCHG16B_TAIL(RW);
12400 }
12401 else
12402 {
12403 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12404 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12405 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12406 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12407 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12408 pEFlags, bUnmapInfo);
12409 IEM_MC_END();
12410 }
12411 }
12412
12413#elif defined(RT_ARCH_ARM64)
12414 /** @todo may require fallback for unaligned accesses... */
12415 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12416 {
12417 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12418 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12419 BODY_CMPXCHG16B_TAIL(RW);
12420 }
12421 else
12422 {
12423 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12424 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12425 BODY_CMPXCHG16B_TAIL(ATOMIC);
12426 }
12427
12428#else
12429 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12430       accesses and not at all atomic, which works fine in a uni-CPU guest
12431 configuration (ignoring DMA). If guest SMP is active we have no choice
12432 but to use a rendezvous callback here. Sigh. */
12433 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12434 {
12435 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12436 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12437 BODY_CMPXCHG16B_TAIL(RW);
12438 }
12439 else
12440 {
12441 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12442 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12443 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12444 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12445 iemCImpl_cmpxchg16b_fallback_rendezvous,
12446 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12447 IEM_MC_END();
12448 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12449 }
12450#endif
12451
12452#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12453 }
12454 Log(("cmpxchg16b -> #UD\n"));
12455 IEMOP_RAISE_INVALID_OPCODE_RET();
12456}
12457
12458FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12459{
12460 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12461 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12462 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12463}
12464
12465
12466/** Opcode 0x0f 0xc7 11/6. */
12467FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12468{
12469 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12470 IEMOP_RAISE_INVALID_OPCODE_RET();
12471
12472 if (IEM_IS_MODRM_REG_MODE(bRm))
12473 {
12474 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12476 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12477 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12478 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12479 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12480 iemCImpl_rdrand, iReg, enmEffOpSize);
12481 IEM_MC_END();
12482 }
12483 /* Register only. */
12484 else
12485 IEMOP_RAISE_INVALID_OPCODE_RET();
12486}
12487
12488/** Opcode 0x0f 0xc7 !11/6. */
12489#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12490FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12491{
12492 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12493 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12494 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12495 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12496 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12498 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12499 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12500 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12501 IEM_MC_END();
12502}
12503#else
12504FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12505#endif
12506
12507/** Opcode 0x66 0x0f 0xc7 !11/6. */
12508#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12509FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12510{
12511 IEMOP_MNEMONIC(vmclear, "vmclear");
12512 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12513 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12514 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12515 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12516 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12517 IEMOP_HLP_DONE_DECODING();
12518 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12519 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12520 IEM_MC_END();
12521}
12522#else
12523FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12524#endif
12525
12526/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12527#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12528FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12529{
12530 IEMOP_MNEMONIC(vmxon, "vmxon");
12531 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12532 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12533 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12535 IEMOP_HLP_DONE_DECODING();
12536 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12537 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12538 IEM_MC_END();
12539}
12540#else
12541FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12542#endif
12543
12544/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12545#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12546FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12547{
12548 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12549 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12550 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12551 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12552 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12554 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12555 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12556 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12557 IEM_MC_END();
12558}
12559#else
12560FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12561#endif
12562
12563/** Opcode 0x0f 0xc7 11/7. */
12564FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12565{
12566 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12567 IEMOP_RAISE_INVALID_OPCODE_RET();
12568
12569 if (IEM_IS_MODRM_REG_MODE(bRm))
12570 {
12571 /* register destination. */
12572 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12574 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12575 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12576 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12577 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12578 iemCImpl_rdseed, iReg, enmEffOpSize);
12579 IEM_MC_END();
12580 }
12581 /* Register only. */
12582 else
12583 IEMOP_RAISE_INVALID_OPCODE_RET();
12584}
12585
12586/**
12587 * Group 9 jump table for register variant.
12588 */
12589IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12590{ /* pfx: none, 066h, 0f3h, 0f2h */
12591 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12592 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12593 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12594 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12595 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12596 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12597 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12598 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12599};
12600AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12601
12602
12603/**
12604 * Group 9 jump table for memory variant.
12605 */
12606IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12607{ /* pfx: none, 066h, 0f3h, 0f2h */
12608 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12609 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12610 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12611 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12612 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12613 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12614 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12615 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12616};
12617AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12618
12619
12620/** Opcode 0x0f 0xc7. */
12621FNIEMOP_DEF(iemOp_Grp9)
12622{
12623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
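    /* Table index = ModR/M reg field * 4 + the prefix index (none, 066h, 0f3h,
       0f2h), matching the column comments on the tables above. */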
12624 if (IEM_IS_MODRM_REG_MODE(bRm))
12625 /* register, register */
12626 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12627 + pVCpu->iem.s.idxPrefix], bRm);
12628 /* memory, register */
12629 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12630 + pVCpu->iem.s.idxPrefix], bRm);
12631}
12632
12633
12634/**
12635 * Common 'bswap register' helper.
12636 */
12637FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12638{
12639 switch (pVCpu->iem.s.enmEffOpSize)
12640 {
12641 case IEMMODE_16BIT:
12642 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12644 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12645 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12646 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12647 IEM_MC_ADVANCE_RIP_AND_FINISH();
12648 IEM_MC_END();
12649 break;
12650
12651 case IEMMODE_32BIT:
12652 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12654 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12655 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12656 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12657 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12658 IEM_MC_ADVANCE_RIP_AND_FINISH();
12659 IEM_MC_END();
12660 break;
12661
12662 case IEMMODE_64BIT:
12663 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12665 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12666 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12667 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12668 IEM_MC_ADVANCE_RIP_AND_FINISH();
12669 IEM_MC_END();
12670 break;
12671
12672 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12673 }
12674}
12675
12676
12677/** Opcode 0x0f 0xc8. */
12678FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12679{
12680 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12681    /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12682       prefix.  It appears REX.B is the correct prefix.  For a parallel
12683       case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
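    /* E.g. in 64-bit mode 41 0F C8 decodes as bswap r8d and 49 0F C8 as
       bswap r8: the register encoded in the low opcode bits is extended by
       REX.B, just like the B8+rd mov forms referenced above. */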
12684 IEMOP_HLP_MIN_486();
12685 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12686}
12687
12688
12689/** Opcode 0x0f 0xc9. */
12690FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12691{
12692 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12693 IEMOP_HLP_MIN_486();
12694 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12695}
12696
12697
12698/** Opcode 0x0f 0xca. */
12699FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12700{
12701    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12702 IEMOP_HLP_MIN_486();
12703 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12704}
12705
12706
12707/** Opcode 0x0f 0xcb. */
12708FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12709{
12710    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12711 IEMOP_HLP_MIN_486();
12712 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12713}
12714
12715
12716/** Opcode 0x0f 0xcc. */
12717FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12718{
12719 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12720 IEMOP_HLP_MIN_486();
12721 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12722}
12723
12724
12725/** Opcode 0x0f 0xcd. */
12726FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12727{
12728 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12729 IEMOP_HLP_MIN_486();
12730 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12731}
12732
12733
12734/** Opcode 0x0f 0xce. */
12735FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12736{
12737 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12738 IEMOP_HLP_MIN_486();
12739 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12740}
12741
12742
12743/** Opcode 0x0f 0xcf. */
12744FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12745{
12746 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12747 IEMOP_HLP_MIN_486();
12748 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12749}
12750
12751
12752/* Opcode 0x0f 0xd0 - invalid */
12753
12754
12755/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12756FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12757{
12758 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12759 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12760}
12761
12762
12763/* Opcode 0xf3 0x0f 0xd0 - invalid */
12764
12765
12766/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12767FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12768{
12769 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12770 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12771}
12772
12773
12774
12775/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12776FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12777{
12778 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12779 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12780}
12781
12782/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12783FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12784{
12785 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12786 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12787}
12788
12789/* Opcode 0xf3 0x0f 0xd1 - invalid */
12790/* Opcode 0xf2 0x0f 0xd1 - invalid */
12791
12792/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12793FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12794{
12795 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12796 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12797}
12798
12799
12800/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12801FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12802{
12803 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12804 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12805}
12806
12807
12808/* Opcode 0xf3 0x0f 0xd2 - invalid */
12809/* Opcode 0xf2 0x0f 0xd2 - invalid */
12810
12811/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12812FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12813{
12814 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12815 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12816}
12817
12818
12819/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12820FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12821{
12822 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12823 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12824}
12825
12826
12827/* Opcode 0xf3 0x0f 0xd3 - invalid */
12828/* Opcode 0xf2 0x0f 0xd3 - invalid */
12829
12830
12831/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12832FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12833{
12834 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12835 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12836}
12837
12838
12839/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12840FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12841{
12842 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12843 SSE2_OPT_BODY_FullFull_To_Full(paddq, iemAImpl_paddq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12844}
12845
12846
12847/* Opcode 0xf3 0x0f 0xd4 - invalid */
12848/* Opcode 0xf2 0x0f 0xd4 - invalid */
12849
12850/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12851FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12852{
12853 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12854 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
12855}
12856
12857/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12858FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12859{
12860 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12861 SSE2_OPT_BODY_FullFull_To_Full(pmullw, iemAImpl_pmullw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12862}
12863
12864
12865/* Opcode 0xf3 0x0f 0xd5 - invalid */
12866/* Opcode 0xf2 0x0f 0xd5 - invalid */
12867
12868/* Opcode 0x0f 0xd6 - invalid */
12869
12870/**
12871 * @opcode 0xd6
12872 * @oppfx 0x66
12873 * @opcpuid sse2
12874 * @opgroup og_sse2_pcksclr_datamove
12875 * @opxcpttype none
12876 * @optest op1=-1 op2=2 -> op1=2
12877 * @optest op1=0 op2=-42 -> op1=-42
12878 */
12879FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12880{
12881 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12883 if (IEM_IS_MODRM_REG_MODE(bRm))
12884 {
12885 /*
12886 * Register, register.
12887 */
12888 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12890 IEM_MC_LOCAL(uint64_t, uSrc);
12891
12892 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12893 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12894
12895 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12896 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12897
12898 IEM_MC_ADVANCE_RIP_AND_FINISH();
12899 IEM_MC_END();
12900 }
12901 else
12902 {
12903 /*
12904 * Memory, register.
12905 */
12906 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12907 IEM_MC_LOCAL(uint64_t, uSrc);
12908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12909
12910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12912 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12913 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12914
12915 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12916 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12917
12918 IEM_MC_ADVANCE_RIP_AND_FINISH();
12919 IEM_MC_END();
12920 }
12921}
12922
12923
12924/**
12925 * @opcode 0xd6
12926 * @opcodesub 11 mr/reg
12927 * @oppfx f3
12928 * @opcpuid sse2
12929 * @opgroup og_sse2_simdint_datamove
12930 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12931 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12932 */
12933FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12934{
12935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12936 if (IEM_IS_MODRM_REG_MODE(bRm))
12937 {
12938 /*
12939 * Register, register.
12940 */
12941 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12942 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12944 IEM_MC_LOCAL(uint64_t, uSrc);
12945
12946 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12947 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12948 IEM_MC_FPU_TO_MMX_MODE();
12949
12950 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12951 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12952
12953 IEM_MC_ADVANCE_RIP_AND_FINISH();
12954 IEM_MC_END();
12955 }
12956
12957 /**
12958 * @opdone
12959 * @opmnemonic udf30fd6mem
12960 * @opcode 0xd6
12961 * @opcodesub !11 mr/reg
12962 * @oppfx f3
12963 * @opunused intel-modrm
12964 * @opcpuid sse
12965 * @optest ->
12966 */
12967 else
12968 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12969}
12970
12971
12972/**
12973 * @opcode 0xd6
12974 * @opcodesub 11 mr/reg
12975 * @oppfx f2
12976 * @opcpuid sse2
12977 * @opgroup og_sse2_simdint_datamove
12978 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12979 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12980 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12981 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12982 * @optest op1=-42 op2=0xfedcba9876543210
12983 * -> op1=0xfedcba9876543210 ftw=0xff
12984 */
12985FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12986{
12987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12988 if (IEM_IS_MODRM_REG_MODE(bRm))
12989 {
12990 /*
12991 * Register, register.
12992 */
12993 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12994 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12996 IEM_MC_LOCAL(uint64_t, uSrc);
12997
12998 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12999 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13000 IEM_MC_FPU_TO_MMX_MODE();
13001
13002 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13003 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13004
13005 IEM_MC_ADVANCE_RIP_AND_FINISH();
13006 IEM_MC_END();
13007 }
13008
13009 /**
13010 * @opdone
13011 * @opmnemonic udf20fd6mem
13012 * @opcode 0xd6
13013 * @opcodesub !11 mr/reg
13014 * @oppfx f2
13015 * @opunused intel-modrm
13016 * @opcpuid sse
13017 * @optest ->
13018 */
13019 else
13020 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13021}
13022
13023
13024/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13025FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13026{
13027 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13028    /* Docs say register only. */
13029 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13030 {
13031        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13032 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
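        /* pmovmskb gathers the most significant bit of each of the eight MMX
           source bytes into bits 0..7 of the destination GREG; architecturally
           the result is zero-extended (cf. the lazy-approach note above). */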
13033 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13035 IEM_MC_ARG(uint64_t *, puDst, 0);
13036 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13037 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13038 IEM_MC_PREPARE_FPU_USAGE();
13039 IEM_MC_FPU_TO_MMX_MODE();
13040
13041 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13042 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13043 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13044
13045 IEM_MC_ADVANCE_RIP_AND_FINISH();
13046 IEM_MC_END();
13047 }
13048 else
13049 IEMOP_RAISE_INVALID_OPCODE_RET();
13050}
13051
13052
13053/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13054FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13055{
13056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13057    /* Docs say register only. */
13058 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13059 {
13060        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13061 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13062 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13064 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13065 IEM_MC_PREPARE_SSE_USAGE();
13066 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
13067 IEM_MC_NATIVE_EMIT_2(iemNativeEmit_pmovmskb_rr_u128, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
13068 } IEM_MC_NATIVE_ELSE() {
13069 IEM_MC_ARG(uint64_t *, puDst, 0);
13070 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13071 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13072 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13073 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13074 } IEM_MC_NATIVE_ENDIF();
13075 IEM_MC_ADVANCE_RIP_AND_FINISH();
13076 IEM_MC_END();
13077 }
13078 else
13079 IEMOP_RAISE_INVALID_OPCODE_RET();
13080}
13081
13082
13083/* Opcode 0xf3 0x0f 0xd7 - invalid */
13084/* Opcode 0xf2 0x0f 0xd7 - invalid */
13085
13086
13087/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13088FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13089{
13090 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13091 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
13092}
13093
13094
13095/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13096FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13097{
13098 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13099 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
13100}
13101
13102
13103/* Opcode 0xf3 0x0f 0xd8 - invalid */
13104/* Opcode 0xf2 0x0f 0xd8 - invalid */
13105
13106/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13107FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13108{
13109 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13110 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
13111}
13112
13113
13114/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13115FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13116{
13117 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13118 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
13119}
13120
13121
13122/* Opcode 0xf3 0x0f 0xd9 - invalid */
13123/* Opcode 0xf2 0x0f 0xd9 - invalid */
13124
13125/** Opcode 0x0f 0xda - pminub Pq, Qq */
13126FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13127{
13128 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13129 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
13130}
13131
13132
13133/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13134FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13135{
13136 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13137 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
13138}
13139
13140/* Opcode 0xf3 0x0f 0xda - invalid */
13141/* Opcode 0xf2 0x0f 0xda - invalid */
13142
13143/** Opcode 0x0f 0xdb - pand Pq, Qq */
13144FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13145{
13146 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13147 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
13148}
13149
13150
13151/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13152FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13153{
13154 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13155 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13156}
13157
13158
13159/* Opcode 0xf3 0x0f 0xdb - invalid */
13160/* Opcode 0xf2 0x0f 0xdb - invalid */
13161
13162/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13163FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13164{
13165 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13166 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
13167}
13168
13169
13170/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13171FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13172{
13173 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13174 SSE2_OPT_BODY_FullFull_To_Full(paddusb, iemAImpl_paddusb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13175}
13176
13177
13178/* Opcode 0xf3 0x0f 0xdc - invalid */
13179/* Opcode 0xf2 0x0f 0xdc - invalid */
13180
13181/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13182FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13183{
13184 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13185 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
13186}
13187
13188
13189/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13190FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13191{
13192 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13193 SSE2_OPT_BODY_FullFull_To_Full(paddusw, iemAImpl_paddusw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13194}
13195
13196
13197/* Opcode 0xf3 0x0f 0xdd - invalid */
13198/* Opcode 0xf2 0x0f 0xdd - invalid */
13199
13200/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13201FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13202{
13203 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13204 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
13205}
13206
13207
13208/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13209FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13210{
13211 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13212 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
13213}
13214
13215/* Opcode 0xf3 0x0f 0xde - invalid */
13216/* Opcode 0xf2 0x0f 0xde - invalid */
13217
13218
13219/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13220FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13221{
13222 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13223 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
13224}
13225
13226
13227/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13228FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13229{
13230 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13231 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
13232}
13233
13234
13235/* Opcode 0xf3 0x0f 0xdf - invalid */
13236/* Opcode 0xf2 0x0f 0xdf - invalid */
13237
13238/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13239FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13240{
13241 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13242 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13243}
13244
13245
13246/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13247FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13248{
13249 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13250 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13251}
13252
13253
13254/* Opcode 0xf3 0x0f 0xe0 - invalid */
13255/* Opcode 0xf2 0x0f 0xe0 - invalid */
13256
13257/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13258FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13259{
13260 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13261 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13262}
13263
13264
13265/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13266FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13267{
13268 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13269 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13270}
13271
13272
13273/* Opcode 0xf3 0x0f 0xe1 - invalid */
13274/* Opcode 0xf2 0x0f 0xe1 - invalid */
13275
13276/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13277FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13278{
13279 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13280 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13281}
13282
13283
13284/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13285FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13286{
13287 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13288 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13289}
13290
13291
13292/* Opcode 0xf3 0x0f 0xe2 - invalid */
13293/* Opcode 0xf2 0x0f 0xe2 - invalid */
13294
13295/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13296FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13297{
13298 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13299 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13300}
13301
13302
13303/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13304FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13305{
13306 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13307 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13308}
13309
13310
13311/* Opcode 0xf3 0x0f 0xe3 - invalid */
13312/* Opcode 0xf2 0x0f 0xe3 - invalid */
13313
13314/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13315FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13316{
13317 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13318 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13319}
13320
13321
13322/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13323FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13324{
13325 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13326 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13327}
13328
13329
13330/* Opcode 0xf3 0x0f 0xe4 - invalid */
13331/* Opcode 0xf2 0x0f 0xe4 - invalid */
13332
13333/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13334FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13335{
13336 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13337 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
13338}
13339
13340
13341/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13342FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13343{
13344 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13345 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
13346}
13347
13348
13349/* Opcode 0xf3 0x0f 0xe5 - invalid */
13350/* Opcode 0xf2 0x0f 0xe5 - invalid */
13351/* Opcode 0x0f 0xe6 - invalid */
13352
13353
13354/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13355FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13356{
13357 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13358 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13359}
13360
13361
13362/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13363FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13364{
13365 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13366 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13367}
13368
13369
13370/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13371FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13372{
13373 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13374 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13375}
13376
13377
13378/**
13379 * @opcode 0xe7
13380 * @opcodesub !11 mr/reg
13381 * @oppfx none
13382 * @opcpuid sse
13383 * @opgroup og_sse1_cachect
13384 * @opxcpttype none
13385 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13386 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13387 */
13388FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13389{
13390 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13392 if (IEM_IS_MODRM_MEM_MODE(bRm))
13393 {
13394 /* Register, memory. */
13395 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13396 IEM_MC_LOCAL(uint64_t, uSrc);
13397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13398
13399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13401 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13402 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13403 IEM_MC_FPU_TO_MMX_MODE();
13404
13405 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13406 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13407
13408 IEM_MC_ADVANCE_RIP_AND_FINISH();
13409 IEM_MC_END();
13410 }
13411 /**
13412 * @opdone
13413 * @opmnemonic ud0fe7reg
13414 * @opcode 0xe7
13415 * @opcodesub 11 mr/reg
13416 * @oppfx none
13417 * @opunused immediate
13418 * @opcpuid sse
13419 * @optest ->
13420 */
13421 else
13422 IEMOP_RAISE_INVALID_OPCODE_RET();
13423}
13424
13425/**
13426 * @opcode 0xe7
13427 * @opcodesub !11 mr/reg
13428 * @oppfx 0x66
13429 * @opcpuid sse2
13430 * @opgroup og_sse2_cachect
13431 * @opxcpttype 1
13432 * @optest op1=-1 op2=2 -> op1=2
13433 * @optest op1=0 op2=-42 -> op1=-42
13434 */
13435FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13436{
13437 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
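    /* The non-temporal hint is not modelled; apart from that this is an
       aligned 16-byte store, with the required #GP alignment check supplied
       by the IEM_MC_STORE_MEM_U128_ALIGN_SSE statement below. */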
13438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13439 if (IEM_IS_MODRM_MEM_MODE(bRm))
13440 {
13441 /* Register, memory. */
13442 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13443 IEM_MC_LOCAL(RTUINT128U, uSrc);
13444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13445
13446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13448 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13449 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13450
13451 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13452 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13453
13454 IEM_MC_ADVANCE_RIP_AND_FINISH();
13455 IEM_MC_END();
13456 }
13457
13458 /**
13459 * @opdone
13460 * @opmnemonic ud660fe7reg
13461 * @opcode 0xe7
13462 * @opcodesub 11 mr/reg
13463 * @oppfx 0x66
13464 * @opunused immediate
13465 * @opcpuid sse
13466 * @optest ->
13467 */
13468 else
13469 IEMOP_RAISE_INVALID_OPCODE_RET();
13470}
13471
13472/* Opcode 0xf3 0x0f 0xe7 - invalid */
13473/* Opcode 0xf2 0x0f 0xe7 - invalid */
13474
13475
13476/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13477FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13478{
13479 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13480 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
13481}
13482
13483
13484/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13485FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13486{
13487 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13488 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
13489}
13490

/*  Opcode 0xf3 0x0f 0xe8 - invalid */
/*  Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode      0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/*  Opcode 0xf3 0x0f 0xe9 - invalid */
/*  Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode      0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/*  Opcode 0xf3 0x0f 0xea - invalid */
/*  Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode      0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/*  Opcode 0xf3 0x0f 0xeb - invalid */
/*  Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode      0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/*  Opcode 0xf3 0x0f 0xec - invalid */
/*  Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode      0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/*  Opcode 0xf3 0x0f 0xed - invalid */
/*  Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode      0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}

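/* Note: pminsw/pmaxsw are signed word minimum/maximum.  Their MMX forms were
   added with SSE (and AMD's MMX extensions), hence the separate
   iemOpCommonMmxSseOpt_FullFull_To_Full worker above rather than the plain
   MMX one.  Lane semantics, as a minimal sketch (hypothetical helper name,
   not the actual iemAImpl_* worker): */
#if 0 /* illustrative sketch */
static int16_t pminswRefLane(int16_t i16Dst, int16_t i16Src)
{
    return i16Dst < i16Src ? i16Dst : i16Src; /* pmaxsw simply flips the comparison */
}
#endif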

/*  Opcode 0xf3 0x0f 0xee - invalid */
/*  Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode      0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/*  Opcode 0xf3 0x0f 0xef - invalid */
/*  Opcode 0xf2 0x0f 0xef - invalid */

/*  Opcode      0x0f 0xf0 - invalid */
/*  Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

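/* Note: lddqu is the SSE3 unaligned 16-byte load; unlike movdqa it never
   faults on misalignment, which is why IEM_MC_FETCH_MEM_U128_NO_AC above
   skips the alignment check.  Functionally it behaves like movdqu, roughly
   (hypothetical plain-C names, needs <string.h>; illustrative only, not the
   IEM microcode): */
#if 0 /* illustrative sketch */
static void lddquRef(RTUINT128U *puDst, const uint8_t *pbSrc /* any alignment */)
{
    memcpy(puDst, pbSrc, sizeof(*puDst)); /* byte-wise copy, no alignment trap */
}
#endif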

/** Opcode      0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/*  Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode      0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/*  Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode      0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

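/* Note: for these register-form shifts the count is the full unsigned 64-bit
   value of the source operand, applied to every lane; counts of the lane
   width or more zero the destination (no masking to the low bits as with the
   GPR shifts).  A minimal per-lane sketch for psllw (hypothetical helper,
   not the actual worker): */
#if 0 /* illustrative sketch */
static uint16_t psllwRefLane(uint16_t u16Lane, uint64_t uCount)
{
    return uCount <= 15 ? (uint16_t)(u16Lane << uCount) : 0; /* count >= 16 clears the lane */
}
#endif
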
13721/* Opcode 0xf2 0x0f 0xf3 - invalid */
13722
13723/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13724FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13725{
13726 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13727 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmuludq_u64);
13728}
13729
13730
13731/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
13732FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13733{
13734 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13735 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
13736}
13737
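/* Note: pmuludq multiplies the low (even-indexed) unsigned dword of each
   64-bit half and stores the full 64-bit products.  For the 128-bit form, a
   minimal sketch (hypothetical helper, not the actual worker): */
#if 0 /* illustrative sketch */
static void pmuludqRefU128(RTUINT128U *puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] = (uint64_t)puDst->au32[0] * puSrc->au32[0]; /* dword 0 x dword 0 */
    puDst->au64[1] = (uint64_t)puDst->au32[2] * puSrc->au32[2]; /* dword 2 x dword 2 */
}
#endif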

/*  Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode      0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

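/* Note: pmaddwd multiplies adjacent signed word pairs and adds the two 32-bit
   products into one signed dword lane.  A minimal per-lane sketch
   (hypothetical helper, not the actual worker): */
#if 0 /* illustrative sketch */
static int32_t pmaddwdRefLane(int16_t i16Dst0, int16_t i16Dst1, int16_t i16Src0, int16_t i16Src1)
{
    uint32_t const uProd0 = (uint32_t)((int32_t)i16Dst0 * i16Src0);
    uint32_t const uProd1 = (uint32_t)((int32_t)i16Dst1 * i16Src1);
    return (int32_t)(uProd0 + uProd1); /* only all-0x8000 inputs wrap */
}
#endif
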
13756/* Opcode 0xf2 0x0f 0xf5 - invalid */
13757
13758/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13759FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13760{
13761 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13762 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13763}
13764
13765
13766/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13767FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13768{
13769 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13770 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13771}
13772
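/* Note: psadbw sums the absolute differences of eight unsigned byte pairs
   into a 16-bit result placed in the low word of the corresponding 64-bit
   half; the remaining words are zeroed.  A minimal sketch for one half
   (hypothetical helper, not the actual worker): */
#if 0 /* illustrative sketch */
static uint64_t psadbwRefU64(uint64_t uDst, uint64_t uSrc)
{
    uint16_t u16Sum = 0;
    for (unsigned i = 0; i < 8; i++)
    {
        uint8_t const b1 = (uint8_t)(uDst >> (i * 8));
        uint8_t const b2 = (uint8_t)(uSrc >> (i * 8));
        u16Sum += (uint16_t)(b1 >= b2 ? b1 - b2 : b2 - b1);
    }
    return u16Sum; /* words 1 thru 3 are zero */
}
#endif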

/*  Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode      0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_DEF(iemOp_maskmovq_Pq_Nq)
{
//    IEMOP_MNEMONIC2(RM, MASKMOVQ, maskmovq, Pq, Nq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX, (implicit) [ ER]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(        uint64_t,         u64EffAddr);
        IEM_MC_LOCAL(        uint64_t,         u64Mem);
        IEM_MC_ARG_LOCAL_REF(uint64_t *,       pu64Mem, u64Mem, 0);
        IEM_MC_ARG(          uint64_t const *, puSrc,           1);
        IEM_MC_ARG(          uint64_t const *, puMsk,           2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U64(u64Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puMsk, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovq_u64, pu64Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, u64EffAddr, u64Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}

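/* Note: maskmovq is a byte-granular masked store to [rDI/eDI/di]: a source
   byte is written only when the MSB of the corresponding mask byte is set.
   IEM emulates it as a read-modify-write of the whole qword (fetch, merge,
   store above).  The merge step, as a minimal sketch (hypothetical helper;
   the real worker is iemAImpl_maskmovq_u64): */
#if 0 /* illustrative sketch */
static void maskmovqRefMerge(uint8_t *pbMem, const uint8_t *pbSrc, const uint8_t *pbMsk)
{
    for (unsigned i = 0; i < 8; i++)
        if (pbMsk[i] & 0x80) /* only the MSB of each mask byte matters */
            pbMem[i] = pbSrc[i];
}
#endif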

/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_DEF(iemOp_maskmovdqu_Vdq_Udq)
{
//    IEMOP_MNEMONIC2(RM, MASKMOVDQU, maskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, (implicit) [ ER]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(        uint64_t,     u64EffAddr);
        IEM_MC_LOCAL(        RTUINT128U,   u128Mem);
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U,  pu128Mem, u128Mem, 0);
        IEM_MC_ARG(          PCRTUINT128U, puSrc,             1);
        IEM_MC_ARG(          PCRTUINT128U, puMsk,             2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/*  Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode      0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubb, iemAImpl_psubb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/*  Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode      0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubw, iemAImpl_psubw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/*  Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode      0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubd, iemAImpl_psubd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/*  Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode      0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}

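/* Note: unlike the other MMX subtracts above, psubq was introduced with SSE2,
   so its MMX form is dispatched through iemOpCommonMmxOpt_FullFull_To_Full_Sse2,
   which (as the name suggests) applies the SSE2 CPUID feature check rather
   than the plain MMX one. */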

/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(psubq, iemAImpl_psubq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/*  Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode      0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddb, iemAImpl_paddb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/*  Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode      0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddw, iemAImpl_paddw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/*  Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode      0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    SSE2_OPT_BODY_FullFull_To_Full(paddd, iemAImpl_paddd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/*  Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*         no prefix,                  066h prefix                f3h prefix,                f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);

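/* Note: the table is indexed as opcode * 4 + prefix column (none, 0x66, 0xf3,
   0xf2), which is what the AssertCompile above pins down: 256 opcodes * 4
   columns == 1024 entries.  A sketch of the lookup the decoder performs (the
   idxPrefix encoding of 0..3 is assumed here for illustration): */
#if 0 /* illustrative sketch */
static PFNIEMOP iemLookupTwoByteHandler(uint8_t bOpcode, unsigned idxPrefix /* 0=none, 1=066h, 2=0f3h, 3=0f2h */)
{
    return g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif
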
14289/** @} */
14290