VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@104136

Last change on this file since 104136 was 104129, checked in by vboxsync, 13 months ago

VMM/IEM: Rework MXCSR handling for SSE instructions, bugref:10641

The old approach of referencing the X86FXSTATE and accessing the MXCSR value there
prevents us from keeping the MXCSR shadowed in a host register for SIMD guest code,
causing unnecessary memory accesses. It also prevents us from skipping flushes of
dirty guest registers, because the instruction helpers would have access to the
CPUMCTX structure.

The new approach passes the guest MXCSR as the first argument of the helper callback,
and the helper returns the MXCSR with the new exception flags set as its return value.
With this the helpers work only on the arguments supplied and don't access anything in
CPUMCTX directly, which allows the recompiler to avoid flushing pending register writes
unless they actually get used.

As a bonus this also gets rid of the IEMSSERESULT struct, which was required because the
helpers are restricted to 4 arguments due to x86 restrictions on the assembly helpers in
IEMAllAImpl.asm.
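
For illustration, a minimal sketch of the two calling conventions; the helper names below are stand-ins, not the actual VirtualBox declarations:

    /* Old style: result and MXCSR flags written through an IEMSSERESULT-style
       struct, with access to the FXSAVE state (and thus CPUMCTX). */
    void     iemAImpl_xxxps_u128_old(PX86FXSTATE pFpuState, IEMSSERESULT *pResult,
                                     PCX86XMMREG puSrc1, PCX86XMMREG puSrc2);

    /* New style: guest MXCSR in as the first argument, updated MXCSR (with the
       new exception flags set) out as the return value; only the supplied
       arguments are touched, and the IEMSSERESULT wrapper goes away. */
    uint32_t iemAImpl_xxxps_u128(uint32_t fMxcsr, PX86XMMREG puDst,
                                 PCX86XMMREG puSrc1, PCX86XMMREG puSrc2);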

/* $Id: IEMAllInstTwoByte0f.cpp.h 104129 2024-04-02 12:37:36Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

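/*
 * Editor's illustration (not part of the original file): the shape of an
 * opcode handler dispatching into the common worker above. The pairing of
 * pxor with this worker is an assumption made for this excerpt.
 */
#if 0
/** Opcode 0x0f 0xef - pxor Pq, Qq (sketch). */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq_sketch)
{
    IEMOP_MNEMONIC(pxor, "pxor Pq,Qq");
    /* All decoding, exception and MMX-mode plumbing lives in the worker;
       the handler only supplies the 64-bit arithmetic helper. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
}
#endif
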
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

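/*
 * Editor's illustration (not part of the original file): the 128-bit workers
 * are dispatched the same way; the pand pairing here is an assumption. Note
 * that the memory path uses IEM_MC_FETCH_MEM_U128_ALIGN_SSE, so an unaligned
 * mem128 operand faults instead of being fetched.
 */
#if 0
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx (sketch). */
FNIEMOP_DEF(iemOp_pand_Vx_Wx_sketch)
{
    IEMOP_MNEMONIC(pand, "pand Vx,Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pand_u128);
}
#endif
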
/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

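/*
 * Editor's illustration (not part of the original file): what a "low half to
 * full" helper does, using punpcklwd-style word interleaving as the example;
 * the function name is hypothetical.
 */
#if 0
static void iemSketch_punpcklwd_u64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t const uDst = *puDst;
    uint64_t const uSrc = *puSrc;
    /* Result, high word to low: src.w1, dst.w1, src.w0, dst.w0. */
    *puDst = ( uDst         & UINT64_C(0xffff))
           | ((uSrc         & UINT64_C(0xffff)) << 16)
           | (((uDst >> 16) & UINT64_C(0xffff)) << 32)
           | (((uSrc >> 16) & UINT64_C(0xffff)) << 48);
}
#endif
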
/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which either 64 or 128 bits are read for
 * SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which either 64 or 128 bits are read for
 * SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

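/*
 * Editor's illustration (not part of the original file): the FP helpers
 * invoked via IEM_MC_CALL_SSE_AIMPL_3 follow the MXCSR convention from the
 * r104129 rework described in the change log above: MXCSR in, updated MXCSR
 * out. The name and exact signature here are assumptions; sketch only.
 */
#if 0
static uint32_t iemSketch_addps_u128(uint32_t fMxcsr, PX86XMMREG pResult,
                                     PCX86XMMREG puSrc1, PCX86XMMREG puSrc2)
{
    /* ... perform four packed single-precision additions into pResult ... */
    RT_NOREF(pResult, puSrc1, puSrc2);
    /* Return the input MXCSR with any newly raised exception flags ORed in,
       e.g. X86_MXCSR_PE for an inexact result. */
    return fMxcsr;
}
#endif
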
/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps      grp6
 * @opcode      /4
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps      grp6
 * @opcode      /5
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}

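/*
 * Editor's illustration (not part of the original file): group dispatch like
 * iemOp_Grp6 above keys off the ModR/M reg field; conceptually the
 * IEM_GET_MODRM_REG_8 style macros boil down to this.
 */
#if 0
static unsigned iemSketchModRmRegField(uint8_t bRm)
{
    /* ModR/M layout: mod[7:6] reg[5:3] rm[2:0]; e.g. for 0x0f 0x00 with
       bRm=0xd0: mod=3, reg=2 (lldt), rm=0. */
    return (bRm >> 3) & 7;
}
#endif
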
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used; if a hypercall
       isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

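/*
 * Editor's illustration (not part of the original file): architecturally,
 * XGETBV reads the extended control register selected by ECX into EDX:EAX;
 * index 0 (XCR0) is the one guaranteed to exist, other indices #GP. A sketch
 * of the semantics the deferred iemCImpl_xgetbv has to implement:
 */
#if 0
static int iemSketchXgetbv(uint32_t uEcx, uint64_t uXcr0, uint32_t *puEax, uint32_t *puEdx)
{
    if (uEcx != 0) /* ignoring the ECX=1/XSS case for brevity */
        return VERR_NOT_SUPPORTED; /* stand-in for raising #GP(0) */
    *puEax = (uint32_t)uXcr0;
    *puEdx = (uint32_t)(uXcr0 >> 32);
    return VINF_SUCCESS;
}
#endif
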
1367/** Opcode 0x0f 0x01 0xd1. */
1368FNIEMOP_DEF(iemOp_Grp7_xsetbv)
1369{
1370 IEMOP_MNEMONIC(xsetbv, "xsetbv");
1371 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1372 {
1373 /** @todo r=ramshankar: We should use
1374 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
1375 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
1376/** @todo testcase: test prefixes and exceptions. currently not checking for the
1377 * OPSIZE one ... */
1378 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1379 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
1380 }
1381 IEMOP_RAISE_INVALID_OPCODE_RET();
1382}
1383
1384
1385/** Opcode 0x0f 0x01 /3. */
1386FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
1387{
1388 IEMOP_MNEMONIC(lidt, "lidt");
1389 IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
1390 IEM_MC_BEGIN(0, 0);
1391 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1394 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1395 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
1396 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1397 IEM_MC_END();
1398}
1399
1400
1401/** Opcode 0x0f 0x01 0xd8. */
1402#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1403FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
1404{
1405 IEMOP_MNEMONIC(vmrun, "vmrun");
1406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1407 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1408 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
1409 iemCImpl_vmrun);
1410}
1411#else
1412FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1413#endif
1414
1415/** Opcode 0x0f 0x01 0xd9. */
1416FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
1417{
1418 IEMOP_MNEMONIC(vmmcall, "vmmcall");
1419 /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
1420 * opcode sequence when F3 or F2 is used as prefix. So, the assumtion
1421 * here cannot be right... */
1422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1423
1424 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1425 want all hypercalls regardless of instruction used, and if a
1426 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1427 (NEM/win makes ASSUMPTIONS about this behavior.) */
1428 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
1429}
1430
1431/** Opcode 0x0f 0x01 0xda. */
1432#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1433FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1434{
1435 IEMOP_MNEMONIC(vmload, "vmload");
1436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1437 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
1438}
1439#else
1440FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1441#endif
1442
1443
1444/** Opcode 0x0f 0x01 0xdb. */
1445#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1446FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
1447{
1448 IEMOP_MNEMONIC(vmsave, "vmsave");
1449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1450 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
1451}
1452#else
1453FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1454#endif
1455
1456
1457/** Opcode 0x0f 0x01 0xdc. */
1458#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1459FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
1460{
1461 IEMOP_MNEMONIC(stgi, "stgi");
1462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1463 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
1464}
1465#else
1466FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1467#endif
1468
1469
1470/** Opcode 0x0f 0x01 0xdd. */
1471#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1472FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
1473{
1474 IEMOP_MNEMONIC(clgi, "clgi");
1475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1476 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
1477}
1478#else
1479FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1480#endif
1481
1482
1483/** Opcode 0x0f 0x01 0xdf. */
1484#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1485FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
1486{
1487 IEMOP_MNEMONIC(invlpga, "invlpga");
1488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1489 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
1490}
1491#else
1492FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1493#endif
1494
1495
1496/** Opcode 0x0f 0x01 0xde. */
1497#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1498FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
1499{
1500 IEMOP_MNEMONIC(skinit, "skinit");
1501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1502 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
1503}
1504#else
1505FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1506#endif
1507
1508
1509/** Opcode 0x0f 0x01 /4. */
1510FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1511{
1512 IEMOP_MNEMONIC(smsw, "smsw");
1513 IEMOP_HLP_MIN_286();
1514 if (IEM_IS_MODRM_REG_MODE(bRm))
1515 {
1516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1517 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
1518 iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1519 }
1520
1521 /* Ignore operand size here, memory refs are always 16-bit. */
1522 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1523 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1526 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1527 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1528 IEM_MC_END();
1529}
1530
1531
1532/** Opcode 0x0f 0x01 /6. */
1533FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1534{
1535 /* The operand size is effectively ignored, all is 16-bit and only the
1536 lower 3-bits are used. */
1537 IEMOP_MNEMONIC(lmsw, "lmsw");
1538 IEMOP_HLP_MIN_286();
1539 if (IEM_IS_MODRM_REG_MODE(bRm))
1540 {
1541 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1543 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1544 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1545 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1546 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1547 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1548 IEM_MC_END();
1549 }
1550 else
1551 {
1552 IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
1553 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1554 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1557 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1558 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1559 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1560 IEM_MC_END();
1561 }
1562}
1563
1564
1565/** Opcode 0x0f 0x01 /7. */
1566FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1567{
1568 IEMOP_MNEMONIC(invlpg, "invlpg");
1569 IEMOP_HLP_MIN_486();
1570 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1571 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1574 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
1575 IEM_MC_END();
1576}
1577
1578
1579/** Opcode 0x0f 0x01 0xf8. */
1580FNIEMOP_DEF(iemOp_Grp7_swapgs)
1581{
1582 IEMOP_MNEMONIC(swapgs, "swapgs");
1583 IEMOP_HLP_ONLY_64BIT();
1584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1585 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1586}
1587
1588
1589/** Opcode 0x0f 0x01 0xf9. */
1590FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1591{
1592 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1594 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1595 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1596 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1597 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1598 iemCImpl_rdtscp);
1599}
1600
1601
1602/**
1603 * Group 7 jump table, memory variant.
1604 */
1605IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1606{
1607 iemOp_Grp7_sgdt,
1608 iemOp_Grp7_sidt,
1609 iemOp_Grp7_lgdt,
1610 iemOp_Grp7_lidt,
1611 iemOp_Grp7_smsw,
1612 iemOp_InvalidWithRM,
1613 iemOp_Grp7_lmsw,
1614 iemOp_Grp7_invlpg
1615};
1616
1617
1618/** Opcode 0x0f 0x01. */
1619FNIEMOP_DEF(iemOp_Grp7)
1620{
1621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1622 if (IEM_IS_MODRM_MEM_MODE(bRm))
1623 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1624
1625 switch (IEM_GET_MODRM_REG_8(bRm))
1626 {
1627 case 0:
1628 switch (IEM_GET_MODRM_RM_8(bRm))
1629 {
1630 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1631 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1632 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1633 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1634 }
1635 IEMOP_RAISE_INVALID_OPCODE_RET();
1636
1637 case 1:
1638 switch (IEM_GET_MODRM_RM_8(bRm))
1639 {
1640 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1641 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1642 }
1643 IEMOP_RAISE_INVALID_OPCODE_RET();
1644
1645 case 2:
1646 switch (IEM_GET_MODRM_RM_8(bRm))
1647 {
1648 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1649 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1650 }
1651 IEMOP_RAISE_INVALID_OPCODE_RET();
1652
1653 case 3:
1654 switch (IEM_GET_MODRM_RM_8(bRm))
1655 {
1656 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1657 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1658 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1659 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1660 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1661 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1662 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1663 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1665 }
1666
1667 case 4:
1668 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1669
1670 case 5:
1671 IEMOP_RAISE_INVALID_OPCODE_RET();
1672
1673 case 6:
1674 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1675
1676 case 7:
1677 switch (IEM_GET_MODRM_RM_8(bRm))
1678 {
1679 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1680 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1681 }
1682 IEMOP_RAISE_INVALID_OPCODE_RET();
1683
1684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1685 }
1686}
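
/*
 * For reference, a small sketch (not part of the decoder) of the ModRM fields
 * the dispatcher above switches on, assuming the standard x86 layout: mod in
 * bits 7:6, reg in bits 5:3 and rm in bits 2:0. Memory forms (mod != 3) go
 * through g_apfnGroup7Mem indexed by reg; register forms sub-select on rm.
 */
#if 0 /* illustrative only */
static uint8_t sketchGroup7Decode(uint8_t bRm, bool *pfMemForm)
{
    *pfMemForm = (bRm >> 6) != 3;   /* mod field; 3 = register operand, else memory. */
    /* rm field is bRm & 7; it picks vmcall/monitor/swapgs/... in register mode. */
    return (bRm >> 3) & 7;          /* reg field; the /0../7 index into g_apfnGroup7Mem. */
}
#endif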
1687
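/**
 * Common worker for LAR and LSL (opcodes 0x0f 0x02 and 0x0f 0x03).
 *
 * Fetches the 16-bit selector from the register or memory operand and defers
 * to the iemCImpl_LarLsl_u16/u64 helpers, which perform the descriptor access
 * checks and report success via ZF (see the @opflmodify zf tags below).
 */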
1688FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1689{
1690 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1692
1693 if (IEM_IS_MODRM_REG_MODE(bRm))
1694 {
1695 switch (pVCpu->iem.s.enmEffOpSize)
1696 {
1697 case IEMMODE_16BIT:
1698 IEM_MC_BEGIN(0, 0);
1699 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1700 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1701 IEM_MC_ARG(uint16_t, u16Sel, 1);
1702 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1703
1704 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1705 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1706 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1707 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1708
1709 IEM_MC_END();
1710 break;
1711
1712 case IEMMODE_32BIT:
1713 case IEMMODE_64BIT:
1714 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1715 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1716 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1717 IEM_MC_ARG(uint16_t, u16Sel, 1);
1718 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1719
1720 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1721 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1722 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1723 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1724
1725 IEM_MC_END();
1726 break;
1727
1728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1729 }
1730 }
1731 else
1732 {
1733 switch (pVCpu->iem.s.enmEffOpSize)
1734 {
1735 case IEMMODE_16BIT:
1736 IEM_MC_BEGIN(0, 0);
1737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1738 IEM_MC_ARG(uint16_t, u16Sel, 1);
1739 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1741
1742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1743 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1744
1745 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1746 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1747 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1748 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1749
1750 IEM_MC_END();
1751 break;
1752
1753 case IEMMODE_32BIT:
1754 case IEMMODE_64BIT:
1755 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1756 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1757 IEM_MC_ARG(uint16_t, u16Sel, 1);
1758 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1760
1761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1762 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1763/** @todo testcase: make sure it's a 16-bit read. */
1764
1765 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1766 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1767 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1768 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1769
1770 IEM_MC_END();
1771 break;
1772
1773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1774 }
1775 }
1776}
1777
1778
1779
1780/**
1781 * @opcode 0x02
1782 * @opflmodify zf
1783 */
1784FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1785{
1786 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1787 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1788}
1789
1790
1791/**
1792 * @opcode 0x03
1793 * @opflmodify zf
1794 */
1795FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1796{
1797 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1798 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1799}
1800
1801
1802/** Opcode 0x0f 0x05. */
1803FNIEMOP_DEF(iemOp_syscall)
1804{
1805 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1807 /** @todo r=aeichner Clobbers cr0 only if this is a 286 LOADALL instruction. */
1808 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1809 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1810 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_syscall);
1811}
1812
1813
1814/** Opcode 0x0f 0x06. */
1815FNIEMOP_DEF(iemOp_clts)
1816{
1817 IEMOP_MNEMONIC(clts, "clts");
1818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1819 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1820}
1821
1822
1823/** Opcode 0x0f 0x07. */
1824FNIEMOP_DEF(iemOp_sysret)
1825{
1826 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1828 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1829 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1830 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1831}
1832
1833
1834/** Opcode 0x0f 0x08. */
1835FNIEMOP_DEF(iemOp_invd)
1836{
1837 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1838 IEMOP_HLP_MIN_486();
1839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1840 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1841}
1842
1843
1844/** Opcode 0x0f 0x09. */
1845FNIEMOP_DEF(iemOp_wbinvd)
1846{
1847 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1848 IEMOP_HLP_MIN_486();
1849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1850 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1851}
1852
1853
1854/** Opcode 0x0f 0x0b. */
1855FNIEMOP_DEF(iemOp_ud2)
1856{
1857 IEMOP_MNEMONIC(ud2, "ud2");
1858 IEMOP_RAISE_INVALID_OPCODE_RET();
1859}
1860
1861/** Opcode 0x0f 0x0d. */
1862FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1863{
1864 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1865 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1866 {
1867 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1868 IEMOP_RAISE_INVALID_OPCODE_RET();
1869 }
1870
1871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1872 if (IEM_IS_MODRM_REG_MODE(bRm))
1873 {
1874 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1875 IEMOP_RAISE_INVALID_OPCODE_RET();
1876 }
1877
1878 switch (IEM_GET_MODRM_REG_8(bRm))
1879 {
1880 case 2: /* Aliased to /0 for the time being. */
1881 case 4: /* Aliased to /0 for the time being. */
1882 case 5: /* Aliased to /0 for the time being. */
1883 case 6: /* Aliased to /0 for the time being. */
1884 case 7: /* Aliased to /0 for the time being. */
1885 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1886 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1887 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1889 }
1890
1891 IEM_MC_BEGIN(0, 0);
1892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1895 /* Currently a NOP. */
1896 IEM_MC_NOREF(GCPtrEffSrc);
1897 IEM_MC_ADVANCE_RIP_AND_FINISH();
1898 IEM_MC_END();
1899}
1900
1901
1902/** Opcode 0x0f 0x0e. */
1903FNIEMOP_DEF(iemOp_femms)
1904{
1905 IEMOP_MNEMONIC(femms, "femms");
1906
1907 IEM_MC_BEGIN(0, 0);
1908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1909 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
1910 IEM_MC_MAYBE_RAISE_FPU_XCPT();
1911 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
1912 IEM_MC_FPU_FROM_MMX_MODE();
1913 IEM_MC_ADVANCE_RIP_AND_FINISH();
1914 IEM_MC_END();
1915}
1916
1917
1918/** Opcode 0x0f 0x0f. */
1919FNIEMOP_DEF(iemOp_3Dnow)
1920{
1921 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1922 {
1923 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1924 IEMOP_RAISE_INVALID_OPCODE_RET();
1925 }
1926
1927#ifdef IEM_WITH_3DNOW
1928 /* This is pretty sparse, use switch instead of table. */
1929 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1930 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
1931#else
1932 IEMOP_BITCH_ABOUT_STUB();
1933 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1934#endif
1935}
1936
1937
1938/**
1939 * @opcode 0x10
1940 * @oppfx none
1941 * @opcpuid sse
1942 * @opgroup og_sse_simdfp_datamove
1943 * @opxcpttype 4UA
1944 * @optest op1=1 op2=2 -> op1=2
1945 * @optest op1=0 op2=-22 -> op1=-22
1946 */
1947FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1948{
1949 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1951 if (IEM_IS_MODRM_REG_MODE(bRm))
1952 {
1953 /*
1954 * XMM128, XMM128.
1955 */
1956 IEM_MC_BEGIN(0, 0);
1957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
1958 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1959 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1960 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
1961 IEM_GET_MODRM_RM(pVCpu, bRm));
1962 IEM_MC_ADVANCE_RIP_AND_FINISH();
1963 IEM_MC_END();
1964 }
1965 else
1966 {
1967 /*
1968 * XMM128, [mem128].
1969 */
1970 IEM_MC_BEGIN(0, 0);
1971 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1973
1974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
1976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1977 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1978
1979 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1980 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1981
1982 IEM_MC_ADVANCE_RIP_AND_FINISH();
1983 IEM_MC_END();
1984 }
1985
1986}
1987
1988
1989/**
1990 * @opcode 0x10
1991 * @oppfx 0x66
1992 * @opcpuid sse2
1993 * @opgroup og_sse2_pcksclr_datamove
1994 * @opxcpttype 4UA
1995 * @optest op1=1 op2=2 -> op1=2
1996 * @optest op1=0 op2=-42 -> op1=-42
1997 */
1998FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1999{
2000 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2002 if (IEM_IS_MODRM_REG_MODE(bRm))
2003 {
2004 /*
2005 * XMM128, XMM128.
2006 */
2007 IEM_MC_BEGIN(0, 0);
2008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2009 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2010 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2011 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2012 IEM_GET_MODRM_RM(pVCpu, bRm));
2013 IEM_MC_ADVANCE_RIP_AND_FINISH();
2014 IEM_MC_END();
2015 }
2016 else
2017 {
2018 /*
2019 * XMM128, [mem128].
2020 */
2021 IEM_MC_BEGIN(0, 0);
2022 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2024
2025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2027 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2028 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2029
2030 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2031 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2032
2033 IEM_MC_ADVANCE_RIP_AND_FINISH();
2034 IEM_MC_END();
2035 }
2036}
2037
2038
2039/**
2040 * @opcode 0x10
2041 * @oppfx 0xf3
2042 * @opcpuid sse
2043 * @opgroup og_sse_simdfp_datamove
2044 * @opxcpttype 5
2045 * @optest op1=1 op2=2 -> op1=2
2046 * @optest op1=0 op2=-22 -> op1=-22
2047 */
2048FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2049{
2050 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2052 if (IEM_IS_MODRM_REG_MODE(bRm))
2053 {
2054 /*
2055 * XMM32, XMM32.
2056 */
2057 IEM_MC_BEGIN(0, 0);
2058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2059 IEM_MC_LOCAL(uint32_t, uSrc);
2060
2061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2062 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2063 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2064 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2065
2066 IEM_MC_ADVANCE_RIP_AND_FINISH();
2067 IEM_MC_END();
2068 }
2069 else
2070 {
2071 /*
2072 * XMM128, [mem32].
2073 */
2074 IEM_MC_BEGIN(0, 0);
2075 IEM_MC_LOCAL(uint32_t, uSrc);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077
2078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2080 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2081 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2082
2083 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2084 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2085
2086 IEM_MC_ADVANCE_RIP_AND_FINISH();
2087 IEM_MC_END();
2088 }
2089}
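
/*
 * A minimal sketch (not the implementation) of the asymmetry encoded above,
 * assuming RTUINT128U's au32 view: the register form merges only the low
 * dword, while the memory load form zero-extends to the full 128 bits.
 * MOVSD (f2 prefix, below) behaves the same way at qword granularity.
 */
#if 0 /* illustrative only */
static void sketchMovssRegForm(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    puDst->au32[0] = puSrc->au32[0];                /* merge; au32[1..3] unchanged. */
}

static void sketchMovssLoadForm(RTUINT128U *puDst, uint32_t u32Mem)
{
    puDst->au32[0] = u32Mem;                        /* zero-extended to 128 bits. */
    puDst->au32[1] = puDst->au32[2] = puDst->au32[3] = 0;
}
#endif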
2090
2091
2092/**
2093 * @opcode 0x10
2094 * @oppfx 0xf2
2095 * @opcpuid sse2
2096 * @opgroup og_sse2_pcksclr_datamove
2097 * @opxcpttype 5
2098 * @optest op1=1 op2=2 -> op1=2
2099 * @optest op1=0 op2=-42 -> op1=-42
2100 */
2101FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2102{
2103 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2105 if (IEM_IS_MODRM_REG_MODE(bRm))
2106 {
2107 /*
2108 * XMM64, XMM64.
2109 */
2110 IEM_MC_BEGIN(0, 0);
2111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2112 IEM_MC_LOCAL(uint64_t, uSrc);
2113
2114 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2115 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2116 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2117 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2118
2119 IEM_MC_ADVANCE_RIP_AND_FINISH();
2120 IEM_MC_END();
2121 }
2122 else
2123 {
2124 /*
2125 * XMM128, [mem64].
2126 */
2127 IEM_MC_BEGIN(0, 0);
2128 IEM_MC_LOCAL(uint64_t, uSrc);
2129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2130
2131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2135
2136 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2137 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2138
2139 IEM_MC_ADVANCE_RIP_AND_FINISH();
2140 IEM_MC_END();
2141 }
2142}
2143
2144
2145/**
2146 * @opcode 0x11
2147 * @oppfx none
2148 * @opcpuid sse
2149 * @opgroup og_sse_simdfp_datamove
2150 * @opxcpttype 4UA
2151 * @optest op1=1 op2=2 -> op1=2
2152 * @optest op1=0 op2=-42 -> op1=-42
2153 */
2154FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2155{
2156 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2158 if (IEM_IS_MODRM_REG_MODE(bRm))
2159 {
2160 /*
2161 * XMM128, XMM128.
2162 */
2163 IEM_MC_BEGIN(0, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2165 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2166 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2167 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2168 IEM_GET_MODRM_REG(pVCpu, bRm));
2169 IEM_MC_ADVANCE_RIP_AND_FINISH();
2170 IEM_MC_END();
2171 }
2172 else
2173 {
2174 /*
2175 * [mem128], XMM128.
2176 */
2177 IEM_MC_BEGIN(0, 0);
2178 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2180
2181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2183 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2185
2186 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2187 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2188
2189 IEM_MC_ADVANCE_RIP_AND_FINISH();
2190 IEM_MC_END();
2191 }
2192}
2193
2194
2195/**
2196 * @opcode 0x11
2197 * @oppfx 0x66
2198 * @opcpuid sse2
2199 * @opgroup og_sse2_pcksclr_datamove
2200 * @opxcpttype 4UA
2201 * @optest op1=1 op2=2 -> op1=2
2202 * @optest op1=0 op2=-42 -> op1=-42
2203 */
2204FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2205{
2206 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2208 if (IEM_IS_MODRM_REG_MODE(bRm))
2209 {
2210 /*
2211 * XMM128, XMM128.
2212 */
2213 IEM_MC_BEGIN(0, 0);
2214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2215 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2216 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2217 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2218 IEM_GET_MODRM_REG(pVCpu, bRm));
2219 IEM_MC_ADVANCE_RIP_AND_FINISH();
2220 IEM_MC_END();
2221 }
2222 else
2223 {
2224 /*
2225 * [mem128], XMM128.
2226 */
2227 IEM_MC_BEGIN(0, 0);
2228 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2230
2231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2233 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2234 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2235
2236 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2237 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2238
2239 IEM_MC_ADVANCE_RIP_AND_FINISH();
2240 IEM_MC_END();
2241 }
2242}
2243
2244
2245/**
2246 * @opcode 0x11
2247 * @oppfx 0xf3
2248 * @opcpuid sse
2249 * @opgroup og_sse_simdfp_datamove
2250 * @opxcpttype 5
2251 * @optest op1=1 op2=2 -> op1=2
2252 * @optest op1=0 op2=-22 -> op1=-22
2253 */
2254FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2255{
2256 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2258 if (IEM_IS_MODRM_REG_MODE(bRm))
2259 {
2260 /*
2261 * XMM32, XMM32.
2262 */
2263 IEM_MC_BEGIN(0, 0);
2264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2265 IEM_MC_LOCAL(uint32_t, uSrc);
2266
2267 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2268 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2269 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2270 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2271
2272 IEM_MC_ADVANCE_RIP_AND_FINISH();
2273 IEM_MC_END();
2274 }
2275 else
2276 {
2277 /*
2278 * [mem32], XMM32.
2279 */
2280 IEM_MC_BEGIN(0, 0);
2281 IEM_MC_LOCAL(uint32_t, uSrc);
2282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2283
2284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2286 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2287 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2288
2289 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2290 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2291
2292 IEM_MC_ADVANCE_RIP_AND_FINISH();
2293 IEM_MC_END();
2294 }
2295}
2296
2297
2298/**
2299 * @opcode 0x11
2300 * @oppfx 0xf2
2301 * @opcpuid sse2
2302 * @opgroup og_sse2_pcksclr_datamove
2303 * @opxcpttype 5
2304 * @optest op1=1 op2=2 -> op1=2
2305 * @optest op1=0 op2=-42 -> op1=-42
2306 */
2307FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2308{
2309 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2311 if (IEM_IS_MODRM_REG_MODE(bRm))
2312 {
2313 /*
2314 * XMM64, XMM64.
2315 */
2316 IEM_MC_BEGIN(0, 0);
2317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2318 IEM_MC_LOCAL(uint64_t, uSrc);
2319
2320 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2321 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2322 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2323 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2324
2325 IEM_MC_ADVANCE_RIP_AND_FINISH();
2326 IEM_MC_END();
2327 }
2328 else
2329 {
2330 /*
2331 * [mem64], XMM64.
2332 */
2333 IEM_MC_BEGIN(0, 0);
2334 IEM_MC_LOCAL(uint64_t, uSrc);
2335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2336
2337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2340 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2341
2342 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2343 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2344
2345 IEM_MC_ADVANCE_RIP_AND_FINISH();
2346 IEM_MC_END();
2347 }
2348}
2349
2350
2351FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2352{
2353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2354 if (IEM_IS_MODRM_REG_MODE(bRm))
2355 {
2356 /**
2357 * @opcode 0x12
2358 * @opcodesub 11 mr/reg
2359 * @oppfx none
2360 * @opcpuid sse
2361 * @opgroup og_sse_simdfp_datamove
2362 * @opxcpttype 5
2363 * @optest op1=1 op2=2 -> op1=2
2364 * @optest op1=0 op2=-42 -> op1=-42
2365 */
2366 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2367
2368 IEM_MC_BEGIN(0, 0);
2369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2370 IEM_MC_LOCAL(uint64_t, uSrc);
2371
2372 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2373 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2374 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2375 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2376
2377 IEM_MC_ADVANCE_RIP_AND_FINISH();
2378 IEM_MC_END();
2379 }
2380 else
2381 {
2382 /**
2383 * @opdone
2384 * @opcode 0x12
2385 * @opcodesub !11 mr/reg
2386 * @oppfx none
2387 * @opcpuid sse
2388 * @opgroup og_sse_simdfp_datamove
2389 * @opxcpttype 5
2390 * @optest op1=1 op2=2 -> op1=2
2391 * @optest op1=0 op2=-42 -> op1=-42
2392 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2393 */
2394 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2395
2396 IEM_MC_BEGIN(0, 0);
2397 IEM_MC_LOCAL(uint64_t, uSrc);
2398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2399
2400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2402 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2403 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2404
2405 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2406 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2407
2408 IEM_MC_ADVANCE_RIP_AND_FINISH();
2409 IEM_MC_END();
2410 }
2411}
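
/*
 * Sketch (not the implementation) of the two forms handled above, assuming
 * RTUINT128U's au64 view: MOVHLPS copies the source's high qword into the
 * destination's low qword, MOVLPS loads the low qword from memory; the
 * destination's high qword is left untouched in both cases.
 */
#if 0 /* illustrative only */
static void sketchMovHlps(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    puDst->au64[0] = puSrc->au64[1];    /* high -> low; puDst->au64[1] unchanged. */
}
#endif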
2412
2413
2414/**
2415 * @opcode 0x12
2416 * @opcodesub !11 mr/reg
2417 * @oppfx 0x66
2418 * @opcpuid sse2
2419 * @opgroup og_sse2_pcksclr_datamove
2420 * @opxcpttype 5
2421 * @optest op1=1 op2=2 -> op1=2
2422 * @optest op1=0 op2=-42 -> op1=-42
2423 */
2424FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2425{
2426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2427 if (IEM_IS_MODRM_MEM_MODE(bRm))
2428 {
2429 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2430
2431 IEM_MC_BEGIN(0, 0);
2432 IEM_MC_LOCAL(uint64_t, uSrc);
2433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2434
2435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2437 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2439
2440 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2441 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2442
2443 IEM_MC_ADVANCE_RIP_AND_FINISH();
2444 IEM_MC_END();
2445 }
2446
2447 /**
2448 * @opdone
2449 * @opmnemonic ud660f12m3
2450 * @opcode 0x12
2451 * @opcodesub 11 mr/reg
2452 * @oppfx 0x66
2453 * @opunused immediate
2454 * @opcpuid sse
2455 * @optest ->
2456 */
2457 else
2458 IEMOP_RAISE_INVALID_OPCODE_RET();
2459}
2460
2461
2462/**
2463 * @opcode 0x12
2464 * @oppfx 0xf3
2465 * @opcpuid sse3
2466 * @opgroup og_sse3_pcksclr_datamove
2467 * @opxcpttype 4
2468 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2469 * op1=0x00000002000000020000000100000001
2470 */
2471FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2472{
2473 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2475 if (IEM_IS_MODRM_REG_MODE(bRm))
2476 {
2477 /*
2478 * XMM, XMM.
2479 */
2480 IEM_MC_BEGIN(0, 0);
2481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2482 IEM_MC_LOCAL(RTUINT128U, uSrc);
2483
2484 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2485 IEM_MC_PREPARE_SSE_USAGE();
2486
2487 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2488 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2489 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2490 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2491 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2492
2493 IEM_MC_ADVANCE_RIP_AND_FINISH();
2494 IEM_MC_END();
2495 }
2496 else
2497 {
2498 /*
2499 * XMM, [mem128].
2500 */
2501 IEM_MC_BEGIN(0, 0);
2502 IEM_MC_LOCAL(RTUINT128U, uSrc);
2503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2504
2505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2507 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2508 IEM_MC_PREPARE_SSE_USAGE();
2509
2510 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2511 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2512 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2513 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2514 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2515
2516 IEM_MC_ADVANCE_RIP_AND_FINISH();
2517 IEM_MC_END();
2518 }
2519}
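
/*
 * Reference model (not the implementation) of the dword shuffle done by the
 * four IEM_MC_STORE_XREG_U32_U128 calls above, assuming RTUINT128U's au32
 * view: MOVSLDUP duplicates the even-numbered source dwords.
 */
#if 0 /* illustrative only */
static RTUINT128U sketchMovSlDup(RTUINT128U uSrc)
{
    RTUINT128U uDst;
    uDst.au32[0] = uDst.au32[1] = uSrc.au32[0];
    uDst.au32[2] = uDst.au32[3] = uSrc.au32[2];
    return uDst;
}
#endif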
2520
2521
2522/**
2523 * @opcode 0x12
2524 * @oppfx 0xf2
2525 * @opcpuid sse3
2526 * @opgroup og_sse3_pcksclr_datamove
2527 * @opxcpttype 5
2528 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2529 * op1=0x22222222111111112222222211111111
2530 */
2531FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2532{
2533 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2535 if (IEM_IS_MODRM_REG_MODE(bRm))
2536 {
2537 /*
2538 * XMM128, XMM64.
2539 */
2540 IEM_MC_BEGIN(0, 0);
2541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2542 IEM_MC_LOCAL(uint64_t, uSrc);
2543
2544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2545 IEM_MC_PREPARE_SSE_USAGE();
2546
2547 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2548 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2549 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2550
2551 IEM_MC_ADVANCE_RIP_AND_FINISH();
2552 IEM_MC_END();
2553 }
2554 else
2555 {
2556 /*
2557 * XMM128, [mem64].
2558 */
2559 IEM_MC_BEGIN(0, 0);
2560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2561 IEM_MC_LOCAL(uint64_t, uSrc);
2562
2563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2565 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2566 IEM_MC_PREPARE_SSE_USAGE();
2567
2568 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2570 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2571
2572 IEM_MC_ADVANCE_RIP_AND_FINISH();
2573 IEM_MC_END();
2574 }
2575}
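
/*
 * Reference model (not the implementation) of MOVDDUP, assuming RTUINT128U's
 * au64 view: the low source qword is duplicated into both destination qwords,
 * which is why the memory form only fetches 64 bits.
 */
#if 0 /* illustrative only */
static RTUINT128U sketchMovDDup(uint64_t uSrcLo)
{
    RTUINT128U uDst;
    uDst.au64[0] = uDst.au64[1] = uSrcLo;
    return uDst;
}
#endif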
2576
2577
2578/**
2579 * @opcode 0x13
2580 * @opcodesub !11 mr/reg
2581 * @oppfx none
2582 * @opcpuid sse
2583 * @opgroup og_sse_simdfp_datamove
2584 * @opxcpttype 5
2585 * @optest op1=1 op2=2 -> op1=2
2586 * @optest op1=0 op2=-42 -> op1=-42
2587 */
2588FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2589{
2590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2591 if (IEM_IS_MODRM_MEM_MODE(bRm))
2592 {
2593 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2594
2595 IEM_MC_BEGIN(0, 0);
2596 IEM_MC_LOCAL(uint64_t, uSrc);
2597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2598
2599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2601 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2602 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2603
2604 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2605 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2606
2607 IEM_MC_ADVANCE_RIP_AND_FINISH();
2608 IEM_MC_END();
2609 }
2610
2611 /**
2612 * @opdone
2613 * @opmnemonic ud0f13m3
2614 * @opcode 0x13
2615 * @opcodesub 11 mr/reg
2616 * @oppfx none
2617 * @opunused immediate
2618 * @opcpuid sse
2619 * @optest ->
2620 */
2621 else
2622 IEMOP_RAISE_INVALID_OPCODE_RET();
2623}
2624
2625
2626/**
2627 * @opcode 0x13
2628 * @opcodesub !11 mr/reg
2629 * @oppfx 0x66
2630 * @opcpuid sse2
2631 * @opgroup og_sse2_pcksclr_datamove
2632 * @opxcpttype 5
2633 * @optest op1=1 op2=2 -> op1=2
2634 * @optest op1=0 op2=-42 -> op1=-42
2635 */
2636FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2637{
2638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2639 if (IEM_IS_MODRM_MEM_MODE(bRm))
2640 {
2641 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2642
2643 IEM_MC_BEGIN(0, 0);
2644 IEM_MC_LOCAL(uint64_t, uSrc);
2645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2646
2647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2649 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2650 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2651
2652 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2653 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658
2659 /**
2660 * @opdone
2661 * @opmnemonic ud660f13m3
2662 * @opcode 0x13
2663 * @opcodesub 11 mr/reg
2664 * @oppfx 0x66
2665 * @opunused immediate
2666 * @opcpuid sse
2667 * @optest ->
2668 */
2669 else
2670 IEMOP_RAISE_INVALID_OPCODE_RET();
2671}
2672
2673
2674/**
2675 * @opmnemonic udf30f13
2676 * @opcode 0x13
2677 * @oppfx 0xf3
2678 * @opunused intel-modrm
2679 * @opcpuid sse
2680 * @optest ->
2681 * @opdone
2682 */
2683
2684/**
2685 * @opmnemonic udf20f13
2686 * @opcode 0x13
2687 * @oppfx 0xf2
2688 * @opunused intel-modrm
2689 * @opcpuid sse
2690 * @optest ->
2691 * @opdone
2692 */
2693
2694/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2695FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2696{
2697 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2698 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2699}
2700
2701
2702/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2703FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2704{
2705 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2706 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2707}
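
/*
 * Sketch (not the iemAImpl_* implementation) of the low-half interleave the
 * two workers above perform, assuming RTUINT128U's au32/au64 views.
 */
#if 0 /* illustrative only */
static RTUINT128U sketchUnpcklPs(RTUINT128U uDst, RTUINT128U uSrc)
{
    RTUINT128U uRes;
    uRes.au32[0] = uDst.au32[0];
    uRes.au32[1] = uSrc.au32[0];
    uRes.au32[2] = uDst.au32[1];
    uRes.au32[3] = uSrc.au32[1];
    return uRes;    /* unpcklpd is the same idea with au64[0] of each operand. */
}
#endif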
2708
2709
2710/**
2711 * @opdone
2712 * @opmnemonic udf30f14
2713 * @opcode 0x14
2714 * @oppfx 0xf3
2715 * @opunused intel-modrm
2716 * @opcpuid sse
2717 * @optest ->
2718 * @opdone
2719 */
2720
2721/**
2722 * @opmnemonic udf20f14
2723 * @opcode 0x14
2724 * @oppfx 0xf2
2725 * @opunused intel-modrm
2726 * @opcpuid sse
2727 * @optest ->
2728 * @opdone
2729 */
2730
2731/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2732FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2733{
2734 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2735 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2736}
2737
2738
2739/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2740FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2741{
2742 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2743 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2744}
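
/*
 * Sketch (not the iemAImpl_* implementation) of the high-half interleave,
 * assuming RTUINT128U's au32 view; unpckhpd does the same with au64[1] of
 * each operand.
 */
#if 0 /* illustrative only */
static RTUINT128U sketchUnpckhPs(RTUINT128U uDst, RTUINT128U uSrc)
{
    RTUINT128U uRes;
    uRes.au32[0] = uDst.au32[2];
    uRes.au32[1] = uSrc.au32[2];
    uRes.au32[2] = uDst.au32[3];
    uRes.au32[3] = uSrc.au32[3];
    return uRes;
}
#endif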
2745
2746
2747/* Opcode 0xf3 0x0f 0x15 - invalid */
2748/* Opcode 0xf2 0x0f 0x15 - invalid */
2749
2750/**
2751 * @opdone
2752 * @opmnemonic udf30f15
2753 * @opcode 0x15
2754 * @oppfx 0xf3
2755 * @opunused intel-modrm
2756 * @opcpuid sse
2757 * @optest ->
2758 * @opdone
2759 */
2760
2761/**
2762 * @opmnemonic udf20f15
2763 * @opcode 0x15
2764 * @oppfx 0xf2
2765 * @opunused intel-modrm
2766 * @opcpuid sse
2767 * @optest ->
2768 * @opdone
2769 */
2770
2771FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2772{
2773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2774 if (IEM_IS_MODRM_REG_MODE(bRm))
2775 {
2776 /**
2777 * @opcode 0x16
2778 * @opcodesub 11 mr/reg
2779 * @oppfx none
2780 * @opcpuid sse
2781 * @opgroup og_sse_simdfp_datamove
2782 * @opxcpttype 5
2783 * @optest op1=1 op2=2 -> op1=2
2784 * @optest op1=0 op2=-42 -> op1=-42
2785 */
2786 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2787
2788 IEM_MC_BEGIN(0, 0);
2789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2790 IEM_MC_LOCAL(uint64_t, uSrc);
2791
2792 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2793 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2794 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2795 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2796
2797 IEM_MC_ADVANCE_RIP_AND_FINISH();
2798 IEM_MC_END();
2799 }
2800 else
2801 {
2802 /**
2803 * @opdone
2804 * @opcode 0x16
2805 * @opcodesub !11 mr/reg
2806 * @oppfx none
2807 * @opcpuid sse
2808 * @opgroup og_sse_simdfp_datamove
2809 * @opxcpttype 5
2810 * @optest op1=1 op2=2 -> op1=2
2811 * @optest op1=0 op2=-42 -> op1=-42
2812 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2813 */
2814 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2815
2816 IEM_MC_BEGIN(0, 0);
2817 IEM_MC_LOCAL(uint64_t, uSrc);
2818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2819
2820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2822 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2823 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2824
2825 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2826 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2827
2828 IEM_MC_ADVANCE_RIP_AND_FINISH();
2829 IEM_MC_END();
2830 }
2831}
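
/*
 * Sketch (not the implementation) of the mirror image of MOVHLPS/MOVLPS
 * above: MOVLHPS copies the source's low qword into the destination's high
 * qword, MOVHPS loads the high qword from memory; the destination's low
 * qword stays untouched in both cases.
 */
#if 0 /* illustrative only */
static void sketchMovLhps(RTUINT128U *puDst, RTUINT128U const *puSrc)
{
    puDst->au64[1] = puSrc->au64[0];    /* low -> high; puDst->au64[0] unchanged. */
}
#endif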
2832
2833
2834/**
2835 * @opcode 0x16
2836 * @opcodesub !11 mr/reg
2837 * @oppfx 0x66
2838 * @opcpuid sse2
2839 * @opgroup og_sse2_pcksclr_datamove
2840 * @opxcpttype 5
2841 * @optest op1=1 op2=2 -> op1=2
2842 * @optest op1=0 op2=-42 -> op1=-42
2843 */
2844FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2845{
2846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2847 if (IEM_IS_MODRM_MEM_MODE(bRm))
2848 {
2849 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2850
2851 IEM_MC_BEGIN(0, 0);
2852 IEM_MC_LOCAL(uint64_t, uSrc);
2853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2854
2855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2857 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2858 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2859
2860 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2861 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2862
2863 IEM_MC_ADVANCE_RIP_AND_FINISH();
2864 IEM_MC_END();
2865 }
2866
2867 /**
2868 * @opdone
2869 * @opmnemonic ud660f16m3
2870 * @opcode 0x16
2871 * @opcodesub 11 mr/reg
2872 * @oppfx 0x66
2873 * @opunused immediate
2874 * @opcpuid sse
2875 * @optest ->
2876 */
2877 else
2878 IEMOP_RAISE_INVALID_OPCODE_RET();
2879}
2880
2881
2882/**
2883 * @opcode 0x16
2884 * @oppfx 0xf3
2885 * @opcpuid sse3
2886 * @opgroup og_sse3_pcksclr_datamove
2887 * @opxcpttype 4
2888 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2889 * op1=0x00000002000000020000000100000001
2890 */
2891FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2892{
2893 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2895 if (IEM_IS_MODRM_REG_MODE(bRm))
2896 {
2897 /*
2898 * XMM128, XMM128.
2899 */
2900 IEM_MC_BEGIN(0, 0);
2901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2902 IEM_MC_LOCAL(RTUINT128U, uSrc);
2903
2904 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2905 IEM_MC_PREPARE_SSE_USAGE();
2906
2907 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2908 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2909 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2910 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2911 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2912
2913 IEM_MC_ADVANCE_RIP_AND_FINISH();
2914 IEM_MC_END();
2915 }
2916 else
2917 {
2918 /*
2919 * XMM128, [mem128].
2920 */
2921 IEM_MC_BEGIN(0, 0);
2922 IEM_MC_LOCAL(RTUINT128U, uSrc);
2923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2924
2925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2927 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2928 IEM_MC_PREPARE_SSE_USAGE();
2929
2930 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2931 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2932 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2933 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2934 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2935
2936 IEM_MC_ADVANCE_RIP_AND_FINISH();
2937 IEM_MC_END();
2938 }
2939}
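
/*
 * Reference model (not the implementation) of MOVSHDUP, the odd-dword
 * counterpart of MOVSLDUP above, assuming RTUINT128U's au32 view.
 */
#if 0 /* illustrative only */
static RTUINT128U sketchMovShDup(RTUINT128U uSrc)
{
    RTUINT128U uDst;
    uDst.au32[0] = uDst.au32[1] = uSrc.au32[1];
    uDst.au32[2] = uDst.au32[3] = uSrc.au32[3];
    return uDst;
}
#endif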
2940
2941/**
2942 * @opdone
2943 * @opmnemonic udf20f16
2944 * @opcode 0x16
2945 * @oppfx 0xf2
2946 * @opunused intel-modrm
2947 * @opcpuid sse
2948 * @optest ->
2949 * @opdone
2950 */
2951
2952
2953/**
2954 * @opcode 0x17
2955 * @opcodesub !11 mr/reg
2956 * @oppfx none
2957 * @opcpuid sse
2958 * @opgroup og_sse_simdfp_datamove
2959 * @opxcpttype 5
2960 * @optest op1=1 op2=2 -> op1=2
2961 * @optest op1=0 op2=-42 -> op1=-42
2962 */
2963FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2964{
2965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2966 if (IEM_IS_MODRM_MEM_MODE(bRm))
2967 {
2968 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2969
2970 IEM_MC_BEGIN(0, 0);
2971 IEM_MC_LOCAL(uint64_t, uSrc);
2972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2973
2974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2977 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2978
2979 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
2980 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2981
2982 IEM_MC_ADVANCE_RIP_AND_FINISH();
2983 IEM_MC_END();
2984 }
2985
2986 /**
2987 * @opdone
2988 * @opmnemonic ud0f17m3
2989 * @opcode 0x17
2990 * @opcodesub 11 mr/reg
2991 * @oppfx none
2992 * @opunused immediate
2993 * @opcpuid sse
2994 * @optest ->
2995 */
2996 else
2997 IEMOP_RAISE_INVALID_OPCODE_RET();
2998}
2999
3000
3001/**
3002 * @opcode 0x17
3003 * @opcodesub !11 mr/reg
3004 * @oppfx 0x66
3005 * @opcpuid sse2
3006 * @opgroup og_sse2_pcksclr_datamove
3007 * @opxcpttype 5
3008 * @optest op1=1 op2=2 -> op1=2
3009 * @optest op1=0 op2=-42 -> op1=-42
3010 */
3011FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3012{
3013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3014 if (IEM_IS_MODRM_MEM_MODE(bRm))
3015 {
3016 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3017
3018 IEM_MC_BEGIN(0, 0);
3019 IEM_MC_LOCAL(uint64_t, uSrc);
3020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3021
3022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3024 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3025 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3026
3027 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3028 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3029
3030 IEM_MC_ADVANCE_RIP_AND_FINISH();
3031 IEM_MC_END();
3032 }
3033
3034 /**
3035 * @opdone
3036 * @opmnemonic ud660f17m3
3037 * @opcode 0x17
3038 * @opcodesub 11 mr/reg
3039 * @oppfx 0x66
3040 * @opunused immediate
3041 * @opcpuid sse
3042 * @optest ->
3043 */
3044 else
3045 IEMOP_RAISE_INVALID_OPCODE_RET();
3046}
3047
3048
3049/**
3050 * @opdone
3051 * @opmnemonic udf30f17
3052 * @opcode 0x17
3053 * @oppfx 0xf3
3054 * @opunused intel-modrm
3055 * @opcpuid sse
3056 * @optest ->
3057 * @opdone
3058 */
3059
3060/**
3061 * @opmnemonic udf20f17
3062 * @opcode 0x17
3063 * @oppfx 0xf2
3064 * @opunused intel-modrm
3065 * @opcpuid sse
3066 * @optest ->
3067 * @opdone
3068 */
3069
3070
3071/** Opcode 0x0f 0x18. */
3072FNIEMOP_DEF(iemOp_prefetch_Grp16)
3073{
3074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3075 if (IEM_IS_MODRM_MEM_MODE(bRm))
3076 {
3077 switch (IEM_GET_MODRM_REG_8(bRm))
3078 {
3079 case 4: /* Aliased to /0 for the time being according to AMD. */
3080 case 5: /* Aliased to /0 for the time being according to AMD. */
3081 case 6: /* Aliased to /0 for the time being according to AMD. */
3082 case 7: /* Aliased to /0 for the time being according to AMD. */
3083 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3084 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3085 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3086 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3088 }
3089
3090 IEM_MC_BEGIN(0, 0);
3091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3094 /* Currently a NOP. */
3095 IEM_MC_NOREF(GCPtrEffSrc);
3096 IEM_MC_ADVANCE_RIP_AND_FINISH();
3097 IEM_MC_END();
3098 }
3099 else
3100 IEMOP_RAISE_INVALID_OPCODE_RET();
3101}
3102
3103
3104/** Opcode 0x0f 0x19..0x1f. */
3105FNIEMOP_DEF(iemOp_nop_Ev)
3106{
3107 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3109 if (IEM_IS_MODRM_REG_MODE(bRm))
3110 {
3111 IEM_MC_BEGIN(0, 0);
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3113 IEM_MC_ADVANCE_RIP_AND_FINISH();
3114 IEM_MC_END();
3115 }
3116 else
3117 {
3118 IEM_MC_BEGIN(0, 0);
3119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3122 /* Currently a NOP. */
3123 IEM_MC_NOREF(GCPtrEffSrc);
3124 IEM_MC_ADVANCE_RIP_AND_FINISH();
3125 IEM_MC_END();
3126 }
3127}
3128
3129
3130/** Opcode 0x0f 0x20. */
3131FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3132{
3133 /* mod is ignored, as are operand-size overrides. */
3134/** @todo testcase: check memory encoding. */
3135 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3136 IEMOP_HLP_MIN_386();
3137 if (IEM_IS_64BIT_CODE(pVCpu))
3138 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3139 else
3140 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3141
3142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3143 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3144 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3145 {
3146 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3147 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3148 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3149 iCrReg |= 8;
3150 }
3151 switch (iCrReg)
3152 {
3153 case 0: case 2: case 3: case 4: case 8:
3154 break;
3155 default:
3156 IEMOP_RAISE_INVALID_OPCODE_RET();
3157 }
3158 IEMOP_HLP_DONE_DECODING();
3159
3160 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3161 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3162 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3163}
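
/*
 * Encoding illustration for the lock-prefix handling above (assuming the AMD
 * CR8-in-32-bit feature is present): "f0 0f 20 c0" decodes as "mov eax, cr8"
 * rather than a locked "mov eax, cr0", i.e. the lock prefix simply adds 8 to
 * the reg field, which is what the iCrReg |= 8 statement models.
 */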
3164
3165
3166/** Opcode 0x0f 0x21. */
3167FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3168{
3169/** @todo testcase: check memory encoding. */
3170 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3171 IEMOP_HLP_MIN_386();
3172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3174 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3175 IEMOP_RAISE_INVALID_OPCODE_RET();
3176 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3177 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3178 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3179}
3180
3181
3182/** Opcode 0x0f 0x22. */
3183FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3184{
3185 /* mod is ignored, as are operand-size overrides. */
3186 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3187 IEMOP_HLP_MIN_386();
3188 if (IEM_IS_64BIT_CODE(pVCpu))
3189 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3190 else
3191 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3192
3193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3194 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3195 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3196 {
3197 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3198 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3199 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3200 iCrReg |= 8;
3201 }
3202 switch (iCrReg)
3203 {
3204 case 0: case 2: case 3: case 4: case 8:
3205 break;
3206 default:
3207 IEMOP_RAISE_INVALID_OPCODE_RET();
3208 }
3209 IEMOP_HLP_DONE_DECODING();
3210
3211 /** @todo r=aeichner Split this up, as flushing cr0 is excessive for crX != 0? */
3212 if (iCrReg & (2 | 8))
3213 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3214 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3215 else
3216 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3217 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3218}
3219
3220
3221/** Opcode 0x0f 0x23. */
3222FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3223{
3224 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3225 IEMOP_HLP_MIN_386();
3226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3229 IEMOP_RAISE_INVALID_OPCODE_RET();
3230 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3231 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3232}
3233
3234
3235/** Opcode 0x0f 0x24. */
3236FNIEMOP_DEF(iemOp_mov_Rd_Td)
3237{
3238 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3239 IEMOP_HLP_MIN_386();
3240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3242 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3243 IEMOP_RAISE_INVALID_OPCODE_RET();
3244 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3245 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3246}
3247
3248
3249/** Opcode 0x0f 0x26. */
3250FNIEMOP_DEF(iemOp_mov_Td_Rd)
3251{
3252 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3253 IEMOP_HLP_MIN_386();
3254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3256 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3257 IEMOP_RAISE_INVALID_OPCODE_RET();
3258 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3259}
3260
3261
3262/**
3263 * @opcode 0x28
3264 * @oppfx none
3265 * @opcpuid sse
3266 * @opgroup og_sse_simdfp_datamove
3267 * @opxcpttype 1
3268 * @optest op1=1 op2=2 -> op1=2
3269 * @optest op1=0 op2=-42 -> op1=-42
3270 */
3271FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3272{
3273 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3275 if (IEM_IS_MODRM_REG_MODE(bRm))
3276 {
3277 /*
3278 * Register, register.
3279 */
3280 IEM_MC_BEGIN(0, 0);
3281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3282 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3283 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3284 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3285 IEM_GET_MODRM_RM(pVCpu, bRm));
3286 IEM_MC_ADVANCE_RIP_AND_FINISH();
3287 IEM_MC_END();
3288 }
3289 else
3290 {
3291 /*
3292 * Register, memory.
3293 */
3294 IEM_MC_BEGIN(0, 0);
3295 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3300 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3301 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3302
3303 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3304 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3305
3306 IEM_MC_ADVANCE_RIP_AND_FINISH();
3307 IEM_MC_END();
3308 }
3309}
3310
3311/**
3312 * @opcode 0x28
3313 * @oppfx 0x66
3314 * @opcpuid sse2
3315 * @opgroup og_sse2_pcksclr_datamove
3316 * @opxcpttype 1
3317 * @optest op1=1 op2=2 -> op1=2
3318 * @optest op1=0 op2=-42 -> op1=-42
3319 */
3320FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3321{
3322 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3324 if (IEM_IS_MODRM_REG_MODE(bRm))
3325 {
3326 /*
3327 * Register, register.
3328 */
3329 IEM_MC_BEGIN(0, 0);
3330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3331 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3332 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3333 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3334 IEM_GET_MODRM_RM(pVCpu, bRm));
3335 IEM_MC_ADVANCE_RIP_AND_FINISH();
3336 IEM_MC_END();
3337 }
3338 else
3339 {
3340 /*
3341 * Register, memory.
3342 */
3343 IEM_MC_BEGIN(0, 0);
3344 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3346
3347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3350 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3351
3352 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3353 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3354
3355 IEM_MC_ADVANCE_RIP_AND_FINISH();
3356 IEM_MC_END();
3357 }
3358}
3359
3360/* Opcode 0xf3 0x0f 0x28 - invalid */
3361/* Opcode 0xf2 0x0f 0x28 - invalid */
3362
3363/**
3364 * @opcode 0x29
3365 * @oppfx none
3366 * @opcpuid sse
3367 * @opgroup og_sse_simdfp_datamove
3368 * @opxcpttype 1
3369 * @optest op1=1 op2=2 -> op1=2
3370 * @optest op1=0 op2=-42 -> op1=-42
3371 */
3372FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3373{
3374 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3376 if (IEM_IS_MODRM_REG_MODE(bRm))
3377 {
3378 /*
3379 * Register, register.
3380 */
3381 IEM_MC_BEGIN(0, 0);
3382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3383 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3384 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3385 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3386 IEM_GET_MODRM_REG(pVCpu, bRm));
3387 IEM_MC_ADVANCE_RIP_AND_FINISH();
3388 IEM_MC_END();
3389 }
3390 else
3391 {
3392 /*
3393 * Memory, register.
3394 */
3395 IEM_MC_BEGIN(0, 0);
3396 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3398
3399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3401 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3402 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3403
3404 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3405 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3406
3407 IEM_MC_ADVANCE_RIP_AND_FINISH();
3408 IEM_MC_END();
3409 }
3410}
3411
3412/**
3413 * @opcode 0x29
3414 * @oppfx 0x66
3415 * @opcpuid sse2
3416 * @opgroup og_sse2_pcksclr_datamove
3417 * @opxcpttype 1
3418 * @optest op1=1 op2=2 -> op1=2
3419 * @optest op1=0 op2=-42 -> op1=-42
3420 */
3421FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3422{
3423 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3425 if (IEM_IS_MODRM_REG_MODE(bRm))
3426 {
3427 /*
3428 * Register, register.
3429 */
3430 IEM_MC_BEGIN(0, 0);
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3432 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3433 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3434 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3435 IEM_GET_MODRM_REG(pVCpu, bRm));
3436 IEM_MC_ADVANCE_RIP_AND_FINISH();
3437 IEM_MC_END();
3438 }
3439 else
3440 {
3441 /*
3442 * Memory, register.
3443 */
3444 IEM_MC_BEGIN(0, 0);
3445 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3447
3448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3450 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3451 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3452
3453 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3454 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3455
3456 IEM_MC_ADVANCE_RIP_AND_FINISH();
3457 IEM_MC_END();
3458 }
3459}
3460
3461/* Opcode 0xf3 0x0f 0x29 - invalid */
3462/* Opcode 0xf2 0x0f 0x29 - invalid */
3463
3464
3465/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3466FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3467{
3468 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3470 if (IEM_IS_MODRM_REG_MODE(bRm))
3471 {
3472 /*
3473 * XMM, MMX
3474 */
3475 IEM_MC_BEGIN(0, 0);
3476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* CVTPI2PS is an SSE, not SSE2, instruction. */
3477 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3478 IEM_MC_LOCAL(X86XMMREG, Dst);
3479 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3480 IEM_MC_ARG(uint64_t, u64Src, 2);
3481 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3482 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3483 IEM_MC_PREPARE_FPU_USAGE();
3484 IEM_MC_FPU_TO_MMX_MODE();
3485
3486 IEM_MC_REF_MXCSR(pfMxcsr);
3487 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3488 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3489
3490 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3491 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3492 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3493 } IEM_MC_ELSE() {
3494 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3495 } IEM_MC_ENDIF();
3496
3497 IEM_MC_ADVANCE_RIP_AND_FINISH();
3498 IEM_MC_END();
3499 }
3500 else
3501 {
3502 /*
3503 * XMM, [mem64]
3504 */
3505 IEM_MC_BEGIN(0, 0);
3506 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3507 IEM_MC_LOCAL(X86XMMREG, Dst);
3508 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3509 IEM_MC_ARG(uint64_t, u64Src, 2);
3510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3511
3512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* CVTPI2PS is an SSE, not SSE2, instruction. */
3514 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3515 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3516 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3517
3518 IEM_MC_PREPARE_FPU_USAGE();
3519 IEM_MC_FPU_TO_MMX_MODE();
3520 IEM_MC_REF_MXCSR(pfMxcsr);
3521
3522 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3523 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3524 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3525 } IEM_MC_ELSE() {
3526 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3527 } IEM_MC_ENDIF();
3528
3529 IEM_MC_ADVANCE_RIP_AND_FINISH();
3530 IEM_MC_END();
3531 }
3532}
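
/*
 * Reference model (not the iemAImpl_cvtpi2ps_u128 implementation) of the
 * conversion above, assuming plain C casts approximate the current rounding
 * mode well enough for illustration: the two packed int32s of the MMX operand
 * become two singles in the low XMM qword, while the high qword stays
 * unchanged, which is why the destination register is fetched first.
 */
#if 0 /* illustrative only */
static void sketchCvtPi2Ps(float *paDst /* 4 dwords, i.e. one XMM register */, uint64_t u64Src)
{
    paDst[0] = (float)(int32_t)(uint32_t)u64Src;          /* low MMX dword.  */
    paDst[1] = (float)(int32_t)(uint32_t)(u64Src >> 32);  /* high MMX dword. */
    /* paDst[2] and paDst[3] are deliberately left untouched.                */
}
#endif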
3533
3534
3535/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3536FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3537{
3538 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3540 if (IEM_IS_MODRM_REG_MODE(bRm))
3541 {
3542 /*
3543 * XMM, MMX
3544 */
3545 IEM_MC_BEGIN(0, 0);
3546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3547 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3548 IEM_MC_LOCAL(X86XMMREG, Dst);
3549 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3550 IEM_MC_ARG(uint64_t, u64Src, 2);
3551 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3552 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3553 IEM_MC_PREPARE_FPU_USAGE();
3554 IEM_MC_FPU_TO_MMX_MODE();
3555
3556 IEM_MC_REF_MXCSR(pfMxcsr);
3557 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3558
3559 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3560 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3561 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3562 } IEM_MC_ELSE() {
3563 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3564 } IEM_MC_ENDIF();
3565
3566 IEM_MC_ADVANCE_RIP_AND_FINISH();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 /*
3572 * XMM, [mem64]
3573 */
3574 IEM_MC_BEGIN(0, 0);
3575 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3576 IEM_MC_LOCAL(X86XMMREG, Dst);
3577 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3578 IEM_MC_ARG(uint64_t, u64Src, 2);
3579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3580
3581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3583 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3584 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3585 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3586
3587 /* Doesn't cause a transition to MMX mode. */
3588 IEM_MC_PREPARE_SSE_USAGE();
3589 IEM_MC_REF_MXCSR(pfMxcsr);
3590
3591 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3592 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3593 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3594 } IEM_MC_ELSE() {
3595 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3596 } IEM_MC_ENDIF();
3597
3598 IEM_MC_ADVANCE_RIP_AND_FINISH();
3599 IEM_MC_END();
3600 }
3601}
3602
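/*
 * Behavioral sketch (illustration only, not the actual helper) of the
 * conversion iemAImpl_cvtpi2pd_u128 performs above: two packed int32 values
 * become two doubles.  Since int32 -> double is always exact, the whole
 * destination register is rewritten and no rounding occurs, which is why no
 * IEM_MC_FETCH_XREG_XMM() of the old destination is needed here, unlike the
 * cvtpi2ps case:
 *
 *      #include <stdint.h>
 *
 *      static void sketchCvtPi2Pd(double adDst[2], uint64_t u64Src)
 *      {
 *          adDst[0] = (double)(int32_t)(u64Src & UINT32_MAX);  // low dword
 *          adDst[1] = (double)(int32_t)(u64Src >> 32);         // high dword
 *      }
 */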
3603
3604/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3605FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3606{
3607 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3608
3609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3610 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3611 {
3612 if (IEM_IS_MODRM_REG_MODE(bRm))
3613 {
3614 /* XMM, greg64 */
3615 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3616 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3617 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3618 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3619
3620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3621 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3622 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3623
3624 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3625 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3626 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3627 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3628 } IEM_MC_ELSE() {
3629 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3630 } IEM_MC_ENDIF();
3631
3632 IEM_MC_ADVANCE_RIP_AND_FINISH();
3633 IEM_MC_END();
3634 }
3635 else
3636 {
3637 /* XMM, [mem64] */
3638 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3640 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3641 IEM_MC_LOCAL(int64_t, i64Src);
3642 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3643 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3644
3645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3647 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3648 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3649
3650 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3651 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3652 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3653 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3654 } IEM_MC_ELSE() {
3655 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3656 } IEM_MC_ENDIF();
3657
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 IEM_MC_END();
3660 }
3661 }
3662 else
3663 {
3664 if (IEM_IS_MODRM_REG_MODE(bRm))
3665 {
3666            /* XMM, greg32 */
3667 IEM_MC_BEGIN(0, 0);
3668 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3669 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3670 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3671
3672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3673 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3674 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3675
3676 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3677 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3678 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3679 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3680 } IEM_MC_ELSE() {
3681 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3682 } IEM_MC_ENDIF();
3683
3684 IEM_MC_ADVANCE_RIP_AND_FINISH();
3685 IEM_MC_END();
3686 }
3687 else
3688 {
3689            /* XMM, [mem32] */
3690 IEM_MC_BEGIN(0, 0);
3691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3692 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3693 IEM_MC_LOCAL(int32_t, i32Src);
3694 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3695 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3696
3697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3699 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3700 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3701
3702 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3703 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3704 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3705 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3706 } IEM_MC_ELSE() {
3707 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3708 } IEM_MC_ENDIF();
3709
3710 IEM_MC_ADVANCE_RIP_AND_FINISH();
3711 IEM_MC_END();
3712 }
3713 }
3714}
3715
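/*
 * Unlike int32 -> double, integer -> float32 conversions can be inexact (a
 * float has only a 24-bit significand), so the cvtsi2ss worker above rounds
 * according to MXCSR.RC and may set the PE flag.  Host-side illustration of
 * the same effect using the C99 floating point environment (sketch only; a
 * strict build also wants #pragma STDC FENV_ACCESS ON):
 *
 *      #include <fenv.h>
 *      #include <stdint.h>
 *
 *      static float sketchCvtSi2Ss(void)
 *      {
 *          int32_t const i32 = 16777217;   // 2^24 + 1, not representable as float
 *          fesetround(FE_UPWARD);          // round-to-nearest would give 16777216.0f
 *          return (float)i32;              // yields 16777218.0f under FE_UPWARD
 *      }
 */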
3716
3717/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3718FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3719{
3720 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3721
3722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3723 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3724 {
3725 if (IEM_IS_MODRM_REG_MODE(bRm))
3726 {
3727 /* XMM, greg64 */
3728 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3729 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3730 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3731 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3732
3733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3734 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3735 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3736
3737 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3738 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3739 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3740 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3741 } IEM_MC_ELSE() {
3742 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3743 } IEM_MC_ENDIF();
3744
3745 IEM_MC_ADVANCE_RIP_AND_FINISH();
3746 IEM_MC_END();
3747 }
3748 else
3749 {
3750 /* XMM, [mem64] */
3751 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3753 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3754 IEM_MC_LOCAL(int64_t, i64Src);
3755 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3756 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3757
3758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3760 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3761 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3762
3763 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3764 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3765 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3766 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3767 } IEM_MC_ELSE() {
3768 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3769 } IEM_MC_ENDIF();
3770
3771 IEM_MC_ADVANCE_RIP_AND_FINISH();
3772 IEM_MC_END();
3773 }
3774 }
3775 else
3776 {
3777 if (IEM_IS_MODRM_REG_MODE(bRm))
3778 {
3779 /* XMM, greg32 */
3780 IEM_MC_BEGIN(0, 0);
3781 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3782 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3783 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3784
3785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3786 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3787 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3788
3789 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3790 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3791 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3792 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3793 } IEM_MC_ELSE() {
3794 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3795 } IEM_MC_ENDIF();
3796
3797 IEM_MC_ADVANCE_RIP_AND_FINISH();
3798 IEM_MC_END();
3799 }
3800 else
3801 {
3802 /* XMM, [mem32] */
3803 IEM_MC_BEGIN(0, 0);
3804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3805 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3806 IEM_MC_LOCAL(int32_t, i32Src);
3807 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3808 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3809
3810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3812 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3813 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3814
3815 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3816 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3817 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3818 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3819 } IEM_MC_ELSE() {
3820 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3821 } IEM_MC_ENDIF();
3822
3823 IEM_MC_ADVANCE_RIP_AND_FINISH();
3824 IEM_MC_END();
3825 }
3826 }
3827}
3828
3829
3830/**
3831 * @opcode 0x2b
3832 * @opcodesub !11 mr/reg
3833 * @oppfx none
3834 * @opcpuid sse
3835 * @opgroup og_sse1_cachect
3836 * @opxcpttype 1
3837 * @optest op1=1 op2=2 -> op1=2
3838 * @optest op1=0 op2=-42 -> op1=-42
3839 */
3840FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3841{
3842 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3843 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3844 if (IEM_IS_MODRM_MEM_MODE(bRm))
3845 {
3846 /*
3847 * memory, register.
3848 */
3849 IEM_MC_BEGIN(0, 0);
3850 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3852
3853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3855 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3856 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3857
3858 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3859 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3860
3861 IEM_MC_ADVANCE_RIP_AND_FINISH();
3862 IEM_MC_END();
3863 }
3864 /* The register, register encoding is invalid. */
3865 else
3866 IEMOP_RAISE_INVALID_OPCODE_RET();
3867}
3868
3869/**
3870 * @opcode 0x2b
3871 * @opcodesub !11 mr/reg
3872 * @oppfx 0x66
3873 * @opcpuid sse2
3874 * @opgroup og_sse2_cachect
3875 * @opxcpttype 1
3876 * @optest op1=1 op2=2 -> op1=2
3877 * @optest op1=0 op2=-42 -> op1=-42
3878 */
3879FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3880{
3881 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3883 if (IEM_IS_MODRM_MEM_MODE(bRm))
3884 {
3885 /*
3886 * memory, register.
3887 */
3888 IEM_MC_BEGIN(0, 0);
3889 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3891
3892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3894 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3895 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3896
3897 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3898 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3899
3900 IEM_MC_ADVANCE_RIP_AND_FINISH();
3901 IEM_MC_END();
3902 }
3903 /* The register, register encoding is invalid. */
3904 else
3905 IEMOP_RAISE_INVALID_OPCODE_RET();
3906}
3907/* Opcode 0xf3 0x0f 0x2b - invalid */
3908/* Opcode 0xf2 0x0f 0x2b - invalid */
3909
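/*
 * Note on the movntps/movntpd pair above: these are non-temporal stores meant
 * for write combining, which is why only the memory destination form exists
 * and the register,register encoding raises #UD.  Guest code typically pairs
 * them with sfence; the equivalent host-side idiom with the SSE intrinsics
 * (sketch) looks like:
 *
 *      #include <xmmintrin.h>
 *
 *      static void sketchStreamStore(float *pDst, __m128 uVec)  // pDst must be 16-byte aligned
 *      {
 *          _mm_stream_ps(pDst, uVec);  // movntps: store bypassing the cache hierarchy
 *          _mm_sfence();               // order the WC store against later stores
 *      }
 */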
3910
3911/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3912FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3913{
3914 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3916 if (IEM_IS_MODRM_REG_MODE(bRm))
3917 {
3918 /*
3919 * Register, register.
3920 */
3921 IEM_MC_BEGIN(0, 0);
3922    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3923 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3924 IEM_MC_LOCAL(uint64_t, u64Dst);
3925 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
3926 IEM_MC_ARG(uint64_t, u64Src, 2);
3927 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3928 IEM_MC_PREPARE_FPU_USAGE();
3929 IEM_MC_FPU_TO_MMX_MODE();
3930
3931 IEM_MC_REF_MXCSR(pfMxcsr);
3932 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3933
3934 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
3935 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3936 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3937 } IEM_MC_ELSE() {
3938 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3939 } IEM_MC_ENDIF();
3940
3941 IEM_MC_ADVANCE_RIP_AND_FINISH();
3942 IEM_MC_END();
3943 }
3944 else
3945 {
3946 /*
3947 * Register, memory.
3948 */
3949 IEM_MC_BEGIN(0, 0);
3950 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3951 IEM_MC_LOCAL(uint64_t, u64Dst);
3952 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
3953 IEM_MC_ARG(uint64_t, u64Src, 2);
3954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3955
3956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3957    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3958 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3959 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3960
3961 IEM_MC_PREPARE_FPU_USAGE();
3962 IEM_MC_FPU_TO_MMX_MODE();
3963 IEM_MC_REF_MXCSR(pfMxcsr);
3964
3965 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
3966 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3967 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3968 } IEM_MC_ELSE() {
3969 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3970 } IEM_MC_ENDIF();
3971
3972 IEM_MC_ADVANCE_RIP_AND_FINISH();
3973 IEM_MC_END();
3974 }
3975}
3976
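/*
 * The extra 't' in cvttps2pi marks the truncating variant: it always rounds
 * toward zero, whereas cvtps2pi (opcode 0x0f 0x2d below) honors MXCSR.RC.
 * Plain C sketch of the difference (a cast truncates, lrintf() uses the
 * current rounding mode):
 *
 *      #include <math.h>
 *      #include <stdint.h>
 *
 *      static int32_t sketchCvttConv(float r32) { return (int32_t)r32; }          // cvtt*: toward zero
 *      static int32_t sketchCvtConv(float r32)  { return (int32_t)lrintf(r32); }  // cvt*: MXCSR.RC
 */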
3977
3978/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3979FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
3980{
3981 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3983 if (IEM_IS_MODRM_REG_MODE(bRm))
3984 {
3985 /*
3986 * Register, register.
3987 */
3988 IEM_MC_BEGIN(0, 0);
3989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3990 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3991 IEM_MC_LOCAL(uint64_t, u64Dst);
3992 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
3993 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
3994 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3995 IEM_MC_PREPARE_FPU_USAGE();
3996 IEM_MC_FPU_TO_MMX_MODE();
3997
3998 IEM_MC_REF_MXCSR(pfMxcsr);
3999 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4000
4001 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4002 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4003 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4004 } IEM_MC_ELSE() {
4005 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4006 } IEM_MC_ENDIF();
4007
4008 IEM_MC_ADVANCE_RIP_AND_FINISH();
4009 IEM_MC_END();
4010 }
4011 else
4012 {
4013 /*
4014 * Register, memory.
4015 */
4016 IEM_MC_BEGIN(0, 0);
4017 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4018 IEM_MC_LOCAL(uint64_t, u64Dst);
4019 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4020 IEM_MC_LOCAL(X86XMMREG, uSrc);
4021 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4023
4024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4026 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4027 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4028
4029 IEM_MC_PREPARE_FPU_USAGE();
4030 IEM_MC_FPU_TO_MMX_MODE();
4031
4032 IEM_MC_REF_MXCSR(pfMxcsr);
4033
4034 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4035 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4036 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4037 } IEM_MC_ELSE() {
4038 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4039 } IEM_MC_ENDIF();
4040
4041 IEM_MC_ADVANCE_RIP_AND_FINISH();
4042 IEM_MC_END();
4043 }
4044}
4045
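/*
 * Why the cvt*2pi workers call IEM_MC_FPU_TO_MMX_MODE(): writing an MMX
 * destination register switches the x87 unit into MMX mode, i.e. the top of
 * stack is cleared and all tag word entries are marked valid.  Sketch of that
 * state change on the abridged FXSAVE tag byte (field names illustrative):
 *
 *      #include <stdint.h>
 *
 *      static void sketchFpuToMmxMode(uint16_t *pFsw, uint8_t *pFtw)
 *      {
 *          *pFsw &= ~(uint16_t)(7 << 11);  // FSW.TOP (bits 11..13) := 0
 *          *pFtw  = 0xff;                  // abridged tag: all eight registers valid
 *      }
 */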
4046
4047/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4048FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4049{
4050    IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4051
4052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4053 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4054 {
4055 if (IEM_IS_MODRM_REG_MODE(bRm))
4056 {
4057 /* greg64, XMM */
4058 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4059 IEM_MC_LOCAL(int64_t, i64Dst);
4060 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4061 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4062
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4064 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4065 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4066
4067 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4068 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4069 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4070 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4071 } IEM_MC_ELSE() {
4072 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4073 } IEM_MC_ENDIF();
4074
4075 IEM_MC_ADVANCE_RIP_AND_FINISH();
4076 IEM_MC_END();
4077 }
4078 else
4079 {
4080            /* greg64, [mem32] */
4081 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4083 IEM_MC_LOCAL(int64_t, i64Dst);
4084 IEM_MC_LOCAL(uint32_t, u32Src);
4085 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4086 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4087
4088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4090 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4091 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4092
4093 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4094 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4095 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4096 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4097 } IEM_MC_ELSE() {
4098 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4099 } IEM_MC_ENDIF();
4100
4101 IEM_MC_ADVANCE_RIP_AND_FINISH();
4102 IEM_MC_END();
4103 }
4104 }
4105 else
4106 {
4107 if (IEM_IS_MODRM_REG_MODE(bRm))
4108 {
4109            /* greg32, XMM */
4110 IEM_MC_BEGIN(0, 0);
4111 IEM_MC_LOCAL(int32_t, i32Dst);
4112 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4113 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4114
4115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4116 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4117 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4118
4119 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4120 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4121 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4122 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4123 } IEM_MC_ELSE() {
4124 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4125 } IEM_MC_ENDIF();
4126
4127 IEM_MC_ADVANCE_RIP_AND_FINISH();
4128 IEM_MC_END();
4129 }
4130 else
4131 {
4132            /* greg32, [mem32] */
4133 IEM_MC_BEGIN(0, 0);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135 IEM_MC_LOCAL(int32_t, i32Dst);
4136 IEM_MC_LOCAL(uint32_t, u32Src);
4137 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4138 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4139
4140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4142 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4143 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4144
4145 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4146 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4147 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4148 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4149 } IEM_MC_ELSE() {
4150 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4151 } IEM_MC_ENDIF();
4152
4153 IEM_MC_ADVANCE_RIP_AND_FINISH();
4154 IEM_MC_END();
4155 }
4156 }
4157}
4158
4159
4160/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4161FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4162{
4163 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4164
4165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4166 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4167 {
4168 if (IEM_IS_MODRM_REG_MODE(bRm))
4169 {
4170 /* greg64, XMM */
4171 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4172 IEM_MC_LOCAL(int64_t, i64Dst);
4173 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4174 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4175
4176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4177 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4178 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4179
4180 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4181 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4182 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4183 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4184 } IEM_MC_ELSE() {
4185 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4186 } IEM_MC_ENDIF();
4187
4188 IEM_MC_ADVANCE_RIP_AND_FINISH();
4189 IEM_MC_END();
4190 }
4191 else
4192 {
4193 /* greg64, [mem64] */
4194 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4196 IEM_MC_LOCAL(int64_t, i64Dst);
4197 IEM_MC_LOCAL(uint64_t, u64Src);
4198 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4199 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4200
4201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4203 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4204 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4205
4206 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4207 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4208 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4209 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4210 } IEM_MC_ELSE() {
4211 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4212 } IEM_MC_ENDIF();
4213
4214 IEM_MC_ADVANCE_RIP_AND_FINISH();
4215 IEM_MC_END();
4216 }
4217 }
4218 else
4219 {
4220 if (IEM_IS_MODRM_REG_MODE(bRm))
4221 {
4222            /* greg32, XMM */
4223 IEM_MC_BEGIN(0, 0);
4224 IEM_MC_LOCAL(int32_t, i32Dst);
4225 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4226 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4227
4228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4230 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4231
4232 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4233 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4234 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4235 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4236 } IEM_MC_ELSE() {
4237 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4238 } IEM_MC_ENDIF();
4239
4240 IEM_MC_ADVANCE_RIP_AND_FINISH();
4241 IEM_MC_END();
4242 }
4243 else
4244 {
4245            /* greg32, [mem64] */
4246 IEM_MC_BEGIN(0, 0);
4247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4248 IEM_MC_LOCAL(int32_t, i32Dst);
4249 IEM_MC_LOCAL(uint64_t, u64Src);
4250 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4251 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4252
4253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4255 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4256 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4257
4258 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4259 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4260 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4261 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4262 } IEM_MC_ELSE() {
4263 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4264 } IEM_MC_ENDIF();
4265
4266 IEM_MC_ADVANCE_RIP_AND_FINISH();
4267 IEM_MC_END();
4268 }
4269 }
4270}
4271
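/*
 * Decode note for the cvt(t)ss2si/cvt(t)sd2si family above and below: REX.W
 * selects the 64-bit general purpose register destination, hence the split on
 * IEM_OP_PRF_SIZE_REX_W, and the 64-bit paths use IEM_MC_BEGIN(IEM_MC_F_64BIT, 0)
 * as they are only reachable in 64-bit mode.  The 32-bit result is stored via
 * IEM_MC_STORE_GREG_U32, which zero extends into the upper half in 64-bit mode.
 */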
4272
4273/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4274FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4275{
4276 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4278 if (IEM_IS_MODRM_REG_MODE(bRm))
4279 {
4280 /*
4281 * Register, register.
4282 */
4283 IEM_MC_BEGIN(0, 0);
4284    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4285 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4286 IEM_MC_LOCAL(uint64_t, u64Dst);
4287 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4288 IEM_MC_ARG(uint64_t, u64Src, 2);
4289
4290 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4291 IEM_MC_PREPARE_FPU_USAGE();
4292 IEM_MC_FPU_TO_MMX_MODE();
4293
4294 IEM_MC_REF_MXCSR(pfMxcsr);
4295 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4296
4297 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4298 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4299 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4300 } IEM_MC_ELSE() {
4301 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4302 } IEM_MC_ENDIF();
4303
4304 IEM_MC_ADVANCE_RIP_AND_FINISH();
4305 IEM_MC_END();
4306 }
4307 else
4308 {
4309 /*
4310 * Register, memory.
4311 */
4312 IEM_MC_BEGIN(0, 0);
4313 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4314 IEM_MC_LOCAL(uint64_t, u64Dst);
4315 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4316 IEM_MC_ARG(uint64_t, u64Src, 2);
4317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4318
4319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4320    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4321 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4322 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4323
4324 IEM_MC_PREPARE_FPU_USAGE();
4325 IEM_MC_FPU_TO_MMX_MODE();
4326 IEM_MC_REF_MXCSR(pfMxcsr);
4327
4328 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4329 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4330 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4331 } IEM_MC_ELSE() {
4332 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4333 } IEM_MC_ENDIF();
4334
4335 IEM_MC_ADVANCE_RIP_AND_FINISH();
4336 IEM_MC_END();
4337 }
4338}
4339
4340
4341/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4342FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4343{
4344 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4345 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4346 if (IEM_IS_MODRM_REG_MODE(bRm))
4347 {
4348 /*
4349 * Register, register.
4350 */
4351 IEM_MC_BEGIN(0, 0);
4352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4353 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4354 IEM_MC_LOCAL(uint64_t, u64Dst);
4355 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4356 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4357
4358 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4359 IEM_MC_PREPARE_FPU_USAGE();
4360 IEM_MC_FPU_TO_MMX_MODE();
4361
4362 IEM_MC_REF_MXCSR(pfMxcsr);
4363 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4364
4365 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4366 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4367 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4368 } IEM_MC_ELSE() {
4369 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4370 } IEM_MC_ENDIF();
4371
4372 IEM_MC_ADVANCE_RIP_AND_FINISH();
4373 IEM_MC_END();
4374 }
4375 else
4376 {
4377 /*
4378 * Register, memory.
4379 */
4380 IEM_MC_BEGIN(0, 0);
4381 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4382 IEM_MC_LOCAL(uint64_t, u64Dst);
4383 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4384 IEM_MC_LOCAL(X86XMMREG, uSrc);
4385 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4387
4388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4390 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4391 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4392
4393 IEM_MC_PREPARE_FPU_USAGE();
4394 IEM_MC_FPU_TO_MMX_MODE();
4395
4396 IEM_MC_REF_MXCSR(pfMxcsr);
4397
4398 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4399 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4400 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4401 } IEM_MC_ELSE() {
4402 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4403 } IEM_MC_ENDIF();
4404
4405 IEM_MC_ADVANCE_RIP_AND_FINISH();
4406 IEM_MC_END();
4407 }
4408}
4409
4410
4411/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4412FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4413{
4414    IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4415
4416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4417 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4418 {
4419 if (IEM_IS_MODRM_REG_MODE(bRm))
4420 {
4421 /* greg64, XMM */
4422 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4423 IEM_MC_LOCAL(int64_t, i64Dst);
4424 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4425 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4426
4427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4428 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4429 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4430
4431 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4432 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4433 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4434 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4435 } IEM_MC_ELSE() {
4436 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4437 } IEM_MC_ENDIF();
4438
4439 IEM_MC_ADVANCE_RIP_AND_FINISH();
4440 IEM_MC_END();
4441 }
4442 else
4443 {
4444            /* greg64, [mem32] */
4445 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4447 IEM_MC_LOCAL(int64_t, i64Dst);
4448 IEM_MC_LOCAL(uint32_t, u32Src);
4449 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4450 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4451
4452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4454 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4455 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4456
4457 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4458 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4459 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4460 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4461 } IEM_MC_ELSE() {
4462 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4463 } IEM_MC_ENDIF();
4464
4465 IEM_MC_ADVANCE_RIP_AND_FINISH();
4466 IEM_MC_END();
4467 }
4468 }
4469 else
4470 {
4471 if (IEM_IS_MODRM_REG_MODE(bRm))
4472 {
4473            /* greg32, XMM */
4474 IEM_MC_BEGIN(0, 0);
4475 IEM_MC_LOCAL(int32_t, i32Dst);
4476 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4477 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4478
4479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4480 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4481 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4482
4483 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4484 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4485 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4486 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4487 } IEM_MC_ELSE() {
4488 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4489 } IEM_MC_ENDIF();
4490
4491 IEM_MC_ADVANCE_RIP_AND_FINISH();
4492 IEM_MC_END();
4493 }
4494 else
4495 {
4496            /* greg32, [mem32] */
4497 IEM_MC_BEGIN(0, 0);
4498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4499 IEM_MC_LOCAL(int32_t, i32Dst);
4500 IEM_MC_LOCAL(uint32_t, u32Src);
4501 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4502 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4503
4504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4507 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4508
4509 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4510 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4511 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4512 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4513 } IEM_MC_ELSE() {
4514 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4515 } IEM_MC_ENDIF();
4516
4517 IEM_MC_ADVANCE_RIP_AND_FINISH();
4518 IEM_MC_END();
4519 }
4520 }
4521}
4522
4523
4524/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4525FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4526{
4527 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4528
4529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4530 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4531 {
4532 if (IEM_IS_MODRM_REG_MODE(bRm))
4533 {
4534 /* greg64, XMM */
4535 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4536 IEM_MC_LOCAL(int64_t, i64Dst);
4537 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4538 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4539
4540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4542 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4543
4544 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4545 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4546 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4547 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4548 } IEM_MC_ELSE() {
4549 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4550 } IEM_MC_ENDIF();
4551
4552 IEM_MC_ADVANCE_RIP_AND_FINISH();
4553 IEM_MC_END();
4554 }
4555 else
4556 {
4557 /* greg64, [mem64] */
4558 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4560 IEM_MC_LOCAL(int64_t, i64Dst);
4561 IEM_MC_LOCAL(uint64_t, u64Src);
4562 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4563 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4564
4565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4567 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4568            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4569
4570 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4571 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4572 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4573 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4574 } IEM_MC_ELSE() {
4575 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4576 } IEM_MC_ENDIF();
4577
4578 IEM_MC_ADVANCE_RIP_AND_FINISH();
4579 IEM_MC_END();
4580 }
4581 }
4582 else
4583 {
4584 if (IEM_IS_MODRM_REG_MODE(bRm))
4585 {
4586 /* greg32, XMM */
4587 IEM_MC_BEGIN(0, 0);
4588 IEM_MC_LOCAL(int32_t, i32Dst);
4589 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4590 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4591
4592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4593 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4594 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4595
4596 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4597 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4598 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4599 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4600 } IEM_MC_ELSE() {
4601 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4602 } IEM_MC_ENDIF();
4603
4604 IEM_MC_ADVANCE_RIP_AND_FINISH();
4605 IEM_MC_END();
4606 }
4607 else
4608 {
4609 /* greg32, [mem64] */
4610 IEM_MC_BEGIN(0, 0);
4611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4612 IEM_MC_LOCAL(int32_t, i32Dst);
4613 IEM_MC_LOCAL(uint64_t, u64Src);
4614 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4615 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4616
4617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4619 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4620 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4621
4622 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4623 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4624 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4625 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4626 } IEM_MC_ELSE() {
4627 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4628 } IEM_MC_ENDIF();
4629
4630 IEM_MC_ADVANCE_RIP_AND_FINISH();
4631 IEM_MC_END();
4632 }
4633 }
4634}
4635
4636
4637/**
4638 * @opcode 0x2e
4639 * @oppfx none
4640 * @opflmodify cf,pf,af,zf,sf,of
4641 * @opflclear af,sf,of
4642 */
4643FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4644{
4645 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4647 if (IEM_IS_MODRM_REG_MODE(bRm))
4648 {
4649 /*
4650 * Register, register.
4651 */
4652 IEM_MC_BEGIN(0, 0);
4653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4654 IEM_MC_LOCAL(uint32_t, fEFlags);
4655 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4656 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
4657 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
4658 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4659 IEM_MC_PREPARE_SSE_USAGE();
4660 IEM_MC_FETCH_EFLAGS(fEFlags);
4661 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4662 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4663 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, puSrc1, puSrc2);
4664 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4665 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4666 } IEM_MC_ELSE() {
4667 IEM_MC_COMMIT_EFLAGS(fEFlags);
4668 } IEM_MC_ENDIF();
4669
4670 IEM_MC_ADVANCE_RIP_AND_FINISH();
4671 IEM_MC_END();
4672 }
4673 else
4674 {
4675 /*
4676 * Register, memory.
4677 */
4678 IEM_MC_BEGIN(0, 0);
4679 IEM_MC_LOCAL(uint32_t, fEFlags);
4680 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4681 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
4682 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4683 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
4684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4685
4686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4688 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4689 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4690
4691 IEM_MC_PREPARE_SSE_USAGE();
4692 IEM_MC_FETCH_EFLAGS(fEFlags);
4693 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4694 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, puSrc1, puSrc2);
4695 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4696 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4697 } IEM_MC_ELSE() {
4698 IEM_MC_COMMIT_EFLAGS(fEFlags);
4699 } IEM_MC_ENDIF();
4700
4701 IEM_MC_ADVANCE_RIP_AND_FINISH();
4702 IEM_MC_END();
4703 }
4704}
4705
4706
4707/**
4708 * @opcode 0x2e
4709 * @oppfx 0x66
4710 * @opflmodify cf,pf,af,zf,sf,of
4711 * @opflclear af,sf,of
4712 */
4713FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4714{
4715 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4717 if (IEM_IS_MODRM_REG_MODE(bRm))
4718 {
4719 /*
4720 * Register, register.
4721 */
4722 IEM_MC_BEGIN(0, 0);
4723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4724 IEM_MC_LOCAL(uint32_t, fEFlags);
4725 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4726 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
4727 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
4728 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4729 IEM_MC_PREPARE_SSE_USAGE();
4730 IEM_MC_FETCH_EFLAGS(fEFlags);
4731 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4732 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4733 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, puSrc1, puSrc2);
4734 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4735 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4736 } IEM_MC_ELSE() {
4737 IEM_MC_COMMIT_EFLAGS(fEFlags);
4738 } IEM_MC_ENDIF();
4739
4740 IEM_MC_ADVANCE_RIP_AND_FINISH();
4741 IEM_MC_END();
4742 }
4743 else
4744 {
4745 /*
4746 * Register, memory.
4747 */
4748 IEM_MC_BEGIN(0, 0);
4749 IEM_MC_LOCAL(uint32_t, fEFlags);
4750 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4751 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
4752 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4753 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
4754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4755
4756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4758 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4759 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4760
4761 IEM_MC_PREPARE_SSE_USAGE();
4762 IEM_MC_FETCH_EFLAGS(fEFlags);
4763 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4764 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, puSrc1, puSrc2);
4765 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4766 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4767 } IEM_MC_ELSE() {
4768 IEM_MC_COMMIT_EFLAGS(fEFlags);
4769 } IEM_MC_ENDIF();
4770
4771 IEM_MC_ADVANCE_RIP_AND_FINISH();
4772 IEM_MC_END();
4773 }
4774}
4775
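/*
 * EFLAGS mapping shared by ucomiss/ucomisd above and comiss/comisd below, as
 * a plain C sketch (X86_EFL_* constants from iprt/x86.h; OF, AF and SF are
 * always cleared, matching the @opflclear annotations):
 *
 *      #include <math.h>
 *      #include <stdint.h>
 *
 *      static uint32_t sketchUcomisEFlags(double r64Src1, double r64Src2)
 *      {
 *          if (isunordered(r64Src1, r64Src2))
 *              return X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;  // unordered (NaN operand)
 *          if (r64Src1 < r64Src2)
 *              return X86_EFL_CF;                            // source1 less than source2
 *          if (r64Src1 == r64Src2)
 *              return X86_EFL_ZF;                            // equal
 *          return 0;                                         // greater than
 *      }
 */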
4776
4777/* Opcode 0xf3 0x0f 0x2e - invalid */
4778/* Opcode 0xf2 0x0f 0x2e - invalid */
4779
4780
4781/**
4782 * @opcode 0x2f
4783 * @oppfx none
4784 * @opflmodify cf,pf,af,zf,sf,of
4785 * @opflclear af,sf,of
4786 */
4787FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4788{
4789 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4790 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4791 if (IEM_IS_MODRM_REG_MODE(bRm))
4792 {
4793 /*
4794 * Register, register.
4795 */
4796 IEM_MC_BEGIN(0, 0);
4797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4798 IEM_MC_LOCAL(uint32_t, fEFlags);
4799 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4800 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
4801 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
4802 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4803 IEM_MC_PREPARE_SSE_USAGE();
4804 IEM_MC_FETCH_EFLAGS(fEFlags);
4805 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4806 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4807 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, puSrc1, puSrc2);
4808 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4809 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4810 } IEM_MC_ELSE() {
4811 IEM_MC_COMMIT_EFLAGS(fEFlags);
4812 } IEM_MC_ENDIF();
4813
4814 IEM_MC_ADVANCE_RIP_AND_FINISH();
4815 IEM_MC_END();
4816 }
4817 else
4818 {
4819 /*
4820 * Register, memory.
4821 */
4822 IEM_MC_BEGIN(0, 0);
4823 IEM_MC_LOCAL(uint32_t, fEFlags);
4824 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4825 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
4826 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4827 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
4828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4829
4830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4832 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4833 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4834
4835 IEM_MC_PREPARE_SSE_USAGE();
4836 IEM_MC_FETCH_EFLAGS(fEFlags);
4837 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4838 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, puSrc1, puSrc2);
4839 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4840 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4841 } IEM_MC_ELSE() {
4842 IEM_MC_COMMIT_EFLAGS(fEFlags);
4843 } IEM_MC_ENDIF();
4844
4845 IEM_MC_ADVANCE_RIP_AND_FINISH();
4846 IEM_MC_END();
4847 }
4848}
4849
4850
4851/**
4852 * @opcode 0x2f
4853 * @oppfx 0x66
4854 * @opflmodify cf,pf,af,zf,sf,of
4855 * @opflclear af,sf,of
4856 */
4857FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4858{
4859 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4861 if (IEM_IS_MODRM_REG_MODE(bRm))
4862 {
4863 /*
4864 * Register, register.
4865 */
4866 IEM_MC_BEGIN(0, 0);
4867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4868 IEM_MC_LOCAL(uint32_t, fEFlags);
4869 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4870 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
4871 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
4872 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4873 IEM_MC_PREPARE_SSE_USAGE();
4874 IEM_MC_FETCH_EFLAGS(fEFlags);
4875 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4876 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4877 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, puSrc1, puSrc2);
4878 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4879 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4880 } IEM_MC_ELSE() {
4881 IEM_MC_COMMIT_EFLAGS(fEFlags);
4882 } IEM_MC_ENDIF();
4883
4884 IEM_MC_ADVANCE_RIP_AND_FINISH();
4885 IEM_MC_END();
4886 }
4887 else
4888 {
4889 /*
4890 * Register, memory.
4891 */
4892 IEM_MC_BEGIN(0, 0);
4893 IEM_MC_LOCAL(uint32_t, fEFlags);
4894 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4895 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
4896 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4897 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
4898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4899
4900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4902 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4903 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4904
4905 IEM_MC_PREPARE_SSE_USAGE();
4906 IEM_MC_FETCH_EFLAGS(fEFlags);
4907 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4908 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, puSrc1, puSrc2);
4909 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4910 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4911 } IEM_MC_ELSE() {
4912 IEM_MC_COMMIT_EFLAGS(fEFlags);
4913 } IEM_MC_ENDIF();
4914
4915 IEM_MC_ADVANCE_RIP_AND_FINISH();
4916 IEM_MC_END();
4917 }
4918}
4919
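/*
 * The only architectural difference between comiss/comisd above and the
 * ucomiss/ucomisd variants earlier is the #IA (invalid operation) behaviour:
 * comis* signals it for QNaN as well as SNaN operands, while ucomis* signals
 * it for SNaN only.  The EFLAGS result mapping is identical.
 */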
4920
4921/* Opcode 0xf3 0x0f 0x2f - invalid */
4922/* Opcode 0xf2 0x0f 0x2f - invalid */
4923
4924/** Opcode 0x0f 0x30. */
4925FNIEMOP_DEF(iemOp_wrmsr)
4926{
4927 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4929 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4930}
4931
4932
4933/** Opcode 0x0f 0x31. */
4934FNIEMOP_DEF(iemOp_rdtsc)
4935{
4936 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4938 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4939 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4940 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4941 iemCImpl_rdtsc);
4942}
4943
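/*
 * The second parameter of IEM_MC_DEFER_TO_CIMPL_0_RET() names the guest
 * register shadows the C implementation may dirty: rdtsc above and
 * rdmsr/rdpmc below return their results in EDX:EAX, hence the xAX|xDX mask,
 * which lets the native recompiler keep other shadowed guest registers live
 * across the call.
 */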
4944
4945/** Opcode 0x0f 0x32. */
4946FNIEMOP_DEF(iemOp_rdmsr)
4947{
4948 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4950 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4951 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4952 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4953 iemCImpl_rdmsr);
4954}
4955
4956
4957/** Opcode 0x0f 0x33. */
4958FNIEMOP_DEF(iemOp_rdpmc)
4959{
4960 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4963 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4964 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4965 iemCImpl_rdpmc);
4966}
4967
4968
4969/** Opcode 0x0f 0x34. */
4970FNIEMOP_DEF(iemOp_sysenter)
4971{
4972 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4974 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4975 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4976 iemCImpl_sysenter);
4977}
4978
4979/** Opcode 0x0f 0x35. */
4980FNIEMOP_DEF(iemOp_sysexit)
4981{
4982 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4984 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4985 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4986 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4987}
4988
4989/** Opcode 0x0f 0x37. */
4990FNIEMOP_STUB(iemOp_getsec);
4991
4992
4993/** Opcode 0x0f 0x38. */
4994FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4995{
4996#ifdef IEM_WITH_THREE_0F_38
4997 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4998 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4999#else
5000 IEMOP_BITCH_ABOUT_STUB();
5001 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5002#endif
5003}
5004
5005
5006/** Opcode 0x0f 0x3a. */
5007FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5008{
5009#ifdef IEM_WITH_THREE_0F_3A
5010 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5011 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5012#else
5013 IEMOP_BITCH_ABOUT_STUB();
5014 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5015#endif
5016}
5017
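/*
 * Table layout note: the three-byte maps are indexed as opcode * 4 +
 * pVCpu->iem.s.idxPrefix, the latter selecting among the four mandatory
 * prefix columns (none, 0x66, 0xf3, 0xf2, the same ordering as the VEX.pp
 * field), so each opcode byte owns four consecutive entries.
 */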
5018
5019/**
5020 * Implements a conditional move.
5021 *
5022 * Wish there was an obvious way to do this where we could share and reduce
5023 * code bloat.
5024 *
5025 * @param a_Cnd The conditional "microcode" operation.
5026 */
5027#define CMOV_X(a_Cnd) \
5028 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5029 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5030 { \
5031 switch (pVCpu->iem.s.enmEffOpSize) \
5032 { \
5033 case IEMMODE_16BIT: \
5034 IEM_MC_BEGIN(0, 0); \
5035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5036 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5037 a_Cnd { \
5038 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5039 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5040 } IEM_MC_ENDIF(); \
5041 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5042 IEM_MC_END(); \
5043 break; \
5044 \
5045 case IEMMODE_32BIT: \
5046 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5048 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5049 a_Cnd { \
5050 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5051 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5052 } IEM_MC_ELSE() { \
5053 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5054 } IEM_MC_ENDIF(); \
5055 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5056 IEM_MC_END(); \
5057 break; \
5058 \
5059 case IEMMODE_64BIT: \
5060 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5062 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5063 a_Cnd { \
5064 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5065 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5066 } IEM_MC_ENDIF(); \
5067 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5068 IEM_MC_END(); \
5069 break; \
5070 \
5071 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5072 } \
5073 } \
5074 else \
5075 { \
5076 switch (pVCpu->iem.s.enmEffOpSize) \
5077 { \
5078 case IEMMODE_16BIT: \
5079 IEM_MC_BEGIN(0, 0); \
5080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5081 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5084 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5085 a_Cnd { \
5086 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5087 } IEM_MC_ENDIF(); \
5088 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5089 IEM_MC_END(); \
5090 break; \
5091 \
5092 case IEMMODE_32BIT: \
5093 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5095 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5098 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5099 a_Cnd { \
5100 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5101 } IEM_MC_ELSE() { \
5102 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5103 } IEM_MC_ENDIF(); \
5104 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5105 IEM_MC_END(); \
5106 break; \
5107 \
5108 case IEMMODE_64BIT: \
5109 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5111 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5114 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5115 a_Cnd { \
5116 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5117 } IEM_MC_ENDIF(); \
5118 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5119 IEM_MC_END(); \
5120 break; \
5121 \
5122 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5123 } \
5124 } do {} while (0)
5125
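/*
 * Note the asymmetry between the 16/64-bit and 32-bit cases in CMOV_X above:
 * in 64-bit mode a 32-bit cmov zero extends the destination even when the
 * condition is false, hence the IEM_MC_ELSE() branches clearing the high
 * half.  Behavioral sketch:
 *
 *      #include <stdbool.h>
 *      #include <stdint.h>
 *
 *      static uint64_t sketchCmov32(uint64_t uDstOld, uint32_t uSrc, bool fCondition)
 *      {
 *          return fCondition ? (uint64_t)uSrc               // move + zero extend
 *                            : (uint64_t)(uint32_t)uDstOld; // still zero extends the old low half
 *      }
 */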
5126
5127
5128/**
5129 * @opcode 0x40
5130 * @opfltest of
5131 */
5132FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5133{
5134 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5135 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5136}
5137
5138
5139/**
5140 * @opcode 0x41
5141 * @opfltest of
5142 */
5143FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5144{
5145 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5146 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5147}
5148
5149
5150/**
5151 * @opcode 0x42
5152 * @opfltest cf
5153 */
5154FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5155{
5156 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5157 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5158}
5159
5160
5161/**
5162 * @opcode 0x43
5163 * @opfltest cf
5164 */
5165FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5166{
5167 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5168 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5169}
5170
5171
5172/**
5173 * @opcode 0x44
5174 * @opfltest zf
5175 */
5176FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5177{
5178 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5179 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5180}
5181
5182
5183/**
5184 * @opcode 0x45
5185 * @opfltest zf
5186 */
5187FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5188{
5189 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5190 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5191}
5192
5193
5194/**
5195 * @opcode 0x46
5196 * @opfltest cf,zf
5197 */
5198FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5199{
5200 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5201 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5202}
5203
5204
5205/**
5206 * @opcode 0x47
5207 * @opfltest cf,zf
5208 */
5209FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5210{
5211 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5212 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5213}
5214
5215
5216/**
5217 * @opcode 0x48
5218 * @opfltest sf
5219 */
5220FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5221{
5222 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5223 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5224}
5225
5226
5227/**
5228 * @opcode 0x49
5229 * @opfltest sf
5230 */
5231FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5232{
5233 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5234 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5235}
5236
5237
5238/**
5239 * @opcode 0x4a
5240 * @opfltest pf
5241 */
5242FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5243{
5244 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5245 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5246}
5247
5248
5249/**
5250 * @opcode 0x4b
5251 * @opfltest pf
5252 */
5253FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5254{
5255 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5256 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5257}
5258
5259
5260/**
5261 * @opcode 0x4c
5262 * @opfltest sf,of
5263 */
5264FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5265{
5266 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5267 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5268}
5269
5270
5271/**
5272 * @opcode 0x4d
5273 * @opfltest sf,of
5274 */
5275FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5276{
5277 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5278 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5279}
5280
5281
5282/**
5283 * @opcode 0x4e
5284 * @opfltest zf,sf,of
5285 */
5286FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5287{
5288 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5289 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5290}
5291
5292
5293/**
5294 * @opcode 0x4f
5295 * @opfltest zf,sf,of
5296 */
5297FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5298{
5299 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5300 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5301}
5302
5303#undef CMOV_X
5304
5305/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5306FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5307{
5308 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5310 if (IEM_IS_MODRM_REG_MODE(bRm))
5311 {
5312 /*
5313 * Register, register.
5314 */
5315 IEM_MC_BEGIN(0, 0);
5316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5317 IEM_MC_LOCAL(uint8_t, u8Dst);
5318 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5319 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5320 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5321 IEM_MC_PREPARE_SSE_USAGE();
5322 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5323 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5324 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5325 IEM_MC_ADVANCE_RIP_AND_FINISH();
5326 IEM_MC_END();
5327 }
5328 /* No memory operand. */
5329 else
5330 IEMOP_RAISE_INVALID_OPCODE_RET();
5331}
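/*
 * Illustrative sketch (not part of the emulation; helper name hypothetical):
 * movmskps gathers the sign bit of each of the four packed singles into
 * bits 3:0 of the destination; the IEM_MC_STORE_GREG_U32 above then
 * zero-extends the 8-bit result into the general register.
 */
#if 0 /* standalone illustration only */
# include <stdint.h>
static uint32_t illustrateMovmskps(uint32_t const auSrc[4]) /* raw float bits */
{
    uint32_t fMask = 0;
    for (unsigned i = 0; i < 4; i++)
        fMask |= (auSrc[i] >> 31) << i; /* sign bit of element i -> bit i */
    return fMask;
}
#endif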
5332
5333
5334/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5335FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5336{
5337 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5339 if (IEM_IS_MODRM_REG_MODE(bRm))
5340 {
5341 /*
5342 * Register, register.
5343 */
5344 IEM_MC_BEGIN(0, 0);
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5346 IEM_MC_LOCAL(uint8_t, u8Dst);
5347 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5348 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5350 IEM_MC_PREPARE_SSE_USAGE();
5351 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5352 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5353 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5354 IEM_MC_ADVANCE_RIP_AND_FINISH();
5355 IEM_MC_END();
5356 }
5357 /* No memory operand. */
5358 else
5359 IEMOP_RAISE_INVALID_OPCODE_RET();
5361}
5362
5363
5364/* Opcode 0xf3 0x0f 0x50 - invalid */
5365/* Opcode 0xf2 0x0f 0x50 - invalid */
5366
5367
5368/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5369FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5370{
5371 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5372 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5373}
5374
5375
5376/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5377FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5378{
5379 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5380 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5381}
5382
5383
5384/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5385FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5386{
5387 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5388 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5389}
5390
5391
5392/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5393FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5394{
5395 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5396 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5397}
5398
5399
5400/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5401FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5402{
5403 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5404 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5405}
5406
5407
5408/* Opcode 0x66 0x0f 0x52 - invalid */
5409
5410
5411/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5412FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5413{
5414 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5415 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5416}
5417
5418
5419/* Opcode 0xf2 0x0f 0x52 - invalid */
5420
5421
5422/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5423FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5424{
5425 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5426 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5427}
5428
5429
5430/* Opcode 0x66 0x0f 0x53 - invalid */
5431
5432
5433/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5434FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5435{
5436 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5437 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5438}
5439
5440
5441/* Opcode 0xf2 0x0f 0x53 - invalid */
5442
5443
5444/** Opcode 0x0f 0x54 - andps Vps, Wps */
5445FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5446{
5447 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5448 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pand_u128);
5449}
5450
5451
5452/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5453FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5454{
5455 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5456 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pand_u128);
5457}
5458
5459
5460/* Opcode 0xf3 0x0f 0x54 - invalid */
5461/* Opcode 0xf2 0x0f 0x54 - invalid */
5462
5463
5464/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5465FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5466{
5467 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5468 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
5469}
5470
5471
5472/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5473FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5474{
5475 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5476 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
5477}
5478
5479
5480/* Opcode 0xf3 0x0f 0x55 - invalid */
5481/* Opcode 0xf2 0x0f 0x55 - invalid */
5482
5483
5484/** Opcode 0x0f 0x56 - orps Vps, Wps */
5485FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5486{
5487 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5488 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_por_u128);
5489}
5490
5491
5492/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5493FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5494{
5495 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5496 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
5497}
5498
5499
5500/* Opcode 0xf3 0x0f 0x56 - invalid */
5501/* Opcode 0xf2 0x0f 0x56 - invalid */
5502
5503
5504/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5505FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5506{
5507 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5508 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pxor_u128);
5509}
5510
5511
5512/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5513FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5514{
5515 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5516 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
5517}
5518
5519
5520/* Opcode 0xf3 0x0f 0x57 - invalid */
5521/* Opcode 0xf2 0x0f 0x57 - invalid */
5522
5523/** Opcode 0x0f 0x58 - addps Vps, Wps */
5524FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5525{
5526 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5527 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5528}
5529
5530
5531/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5532FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5533{
5534 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5535 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5536}
5537
5538
5539/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5540FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5541{
5542 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5543 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5544}
5545
5546
5547/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5548FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5549{
5550 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5551 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5552}
5553
5554
5555/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5556FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5557{
5558 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5559 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5560}
5561
5562
5563/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5564FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5565{
5566 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5567 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5568}
5569
5570
5571/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5572FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5573{
5574 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5575 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5576}
5577
5578
5579/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5580FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5581{
5582 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5583 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5584}
5585
5586
5587/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5588FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5589{
5590 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5591 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5592}
5593
5594
5595/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5596FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5597{
5598 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5599 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5600}
5601
5602
5603/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5604FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5605{
5606 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5607 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5608}
5609
5610
5611/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5612FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5613{
5614 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5615 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5616}
5617
5618
5619/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5620FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5621{
5622 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5623 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5624}
5625
5626
5627/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5628FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5629{
5630 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5631 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5632}
5633
5634
5635/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5636FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5637{
5638 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5639 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5640}
5641
5642
5643/* Opcode 0xf2 0x0f 0x5b - invalid */
5644
5645
5646/** Opcode 0x0f 0x5c - subps Vps, Wps */
5647FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5648{
5649 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5650 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5651}
5652
5653
5654/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5655FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5656{
5657 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5658 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5659}
5660
5661
5662/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5663FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5664{
5665 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5666 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5667}
5668
5669
5670/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5671FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5672{
5673 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5674 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5675}
5676
5677
5678/** Opcode 0x0f 0x5d - minps Vps, Wps */
5679FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5680{
5681 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5682 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5683}
5684
5685
5686/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5687FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5688{
5689 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5690 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5691}
5692
5693
5694/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5695FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5696{
5697 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5698 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5699}
5700
5701
5702/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5703FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5704{
5705 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5706 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5707}
5708
5709
5710/** Opcode 0x0f 0x5e - divps Vps, Wps */
5711FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5712{
5713 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5714 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5715}
5716
5717
5718/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5719FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5720{
5721 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5722 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5723}
5724
5725
5726/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5727FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5728{
5729 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5730 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5731}
5732
5733
5734/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5735FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5736{
5737 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5738 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5739}
5740
5741
5742/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5743FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5744{
5745 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5746 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5747}
5748
5749
5750/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5751FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5752{
5753 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5754 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5755}
5756
5757
5758/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5759FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5760{
5761 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5762 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5763}
5764
5765
5766/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5767FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5768{
5769 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5770 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5771}
5772
5773
5774/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5775FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5776{
5777 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5778 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5779}
5780
5781
5782/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5783FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5784{
5785 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5786 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5787}
5788
5789
5790/* Opcode 0xf3 0x0f 0x60 - invalid */
5791
5792
5793/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5794FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5795{
5796 /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
5797 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5798 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5799}
5800
5801
5802/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5803FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5804{
5805 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5806 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5807}
5808
5809
5810/* Opcode 0xf3 0x0f 0x61 - invalid */
5811
5812
5813/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5814FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5815{
5816 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5817 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5818}
5819
5820
5821/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5822FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5823{
5824 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5825 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5826}
5827
5828
5829/* Opcode 0xf3 0x0f 0x62 - invalid */
5830
5831
5832
5833/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5834FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5835{
5836 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5837 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5838}
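/*
 * Illustrative sketch (not part of the emulation; helper name hypothetical):
 * packsswb narrows each signed word to a signed byte with saturation, so
 * out-of-range values clip to 0x7f / 0x80 instead of being truncated.
 */
#if 0 /* standalone illustration only */
# include <stdint.h>
static int8_t illustratePackSatS16ToS8(int16_t i16Src)
{
    if (i16Src > INT8_MAX) return INT8_MAX; /* 0x7f */
    if (i16Src < INT8_MIN) return INT8_MIN; /* 0x80 (-128) */
    return (int8_t)i16Src;
}
#endif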
5839
5840
5841/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5842FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5843{
5844 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5845 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5846}
5847
5848
5849/* Opcode 0xf3 0x0f 0x63 - invalid */
5850
5851
5852/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5853FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5854{
5855 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5856 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5857}
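/*
 * Illustrative sketch (not part of the emulation; helper name hypothetical):
 * the pcmpgt/pcmpeq family performs an element-wise compare and writes an
 * all-ones or all-zeros mask per element rather than setting EFLAGS.
 */
#if 0 /* standalone illustration only */
# include <stdint.h>
static void illustratePcmpgtb(uint8_t abDst[8], int8_t const abSrc1[8], int8_t const abSrc2[8])
{
    for (unsigned i = 0; i < 8; i++)
        abDst[i] = abSrc1[i] > abSrc2[i] ? 0xff : 0x00; /* signed compare */
}
#endif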
5858
5859
5860/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5861FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5862{
5863 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5864 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5865}
5866
5867
5868/* Opcode 0xf3 0x0f 0x64 - invalid */
5869
5870
5871/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5872FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5873{
5874 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5875 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5876}
5877
5878
5879/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5880FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5881{
5882 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5883 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5884}
5885
5886
5887/* Opcode 0xf3 0x0f 0x65 - invalid */
5888
5889
5890/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5891FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5892{
5893 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5894 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5895}
5896
5897
5898/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5899FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5900{
5901 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5902 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5903}
5904
5905
5906/* Opcode 0xf3 0x0f 0x66 - invalid */
5907
5908
5909/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5910FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5911{
5912 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5913 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5914}
5915
5916
5917/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5918FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5919{
5920 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5921 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
5922}
5923
5924
5925/* Opcode 0xf3 0x0f 0x67 - invalid */
5926
5927
5928/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5929 * @note Intel and AMD both use Qd for the second parameter, however they
5930 * both list it as an mmX/mem64 operand and Intel describes it as being
5931 * loaded as a qword, so it should be Qq, shouldn't it? */
5932FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5933{
5934 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5935 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5936}
5937
5938
5939/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5940FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5941{
5942 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5943 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5944}
5945
5946
5947/* Opcode 0xf3 0x0f 0x68 - invalid */
5948
5949
5950/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5951 * @note Intel and AMD both use Qd for the second parameter, however they
5952 * both list it as an mmX/mem64 operand and Intel describes it as being
5953 * loaded as a qword, so it should be Qq, shouldn't it? */
5954FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5955{
5956 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5957 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5958}
5959
5960
5961/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5962FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5963{
5964 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5965 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5967}
5968
5969
5970/* Opcode 0xf3 0x0f 0x69 - invalid */
5971
5972
5973/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5974 * @note Intel and AMD both use Qd for the second parameter, however they
5975 * both list it as an mmX/mem64 operand and Intel describes it as being
5976 * loaded as a qword, so it should be Qq, shouldn't it? */
5977FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5978{
5979 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5980 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5981}
5982
5983
5984/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5985FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5986{
5987 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5988 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5989}
5990
5991
5992/* Opcode 0xf3 0x0f 0x6a - invalid */
5993
5994
5995/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5996FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5997{
5998 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5999 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6000}
6001
6002
6003/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6004FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6005{
6006 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6007 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6008}
6009
6010
6011/* Opcode 0xf3 0x0f 0x6b - invalid */
6012
6013
6014/* Opcode 0x0f 0x6c - invalid */
6015
6016
6017/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6018FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6019{
6020 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6021 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6022}
6023
6024
6025/* Opcode 0xf3 0x0f 0x6c - invalid */
6026/* Opcode 0xf2 0x0f 0x6c - invalid */
6027
6028
6029/* Opcode 0x0f 0x6d - invalid */
6030
6031
6032/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6033FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6034{
6035 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6036 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6037}
6038
6039
6040/* Opcode 0xf3 0x0f 0x6d - invalid */
6041
6042
6043FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6044{
6045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6046 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6047 {
6048 /**
6049 * @opcode 0x6e
6050 * @opcodesub rex.w=1
6051 * @oppfx none
6052 * @opcpuid mmx
6053 * @opgroup og_mmx_datamove
6054 * @opxcpttype 5
6055 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6056 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6057 */
6058 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6059 if (IEM_IS_MODRM_REG_MODE(bRm))
6060 {
6061 /* MMX, greg64 */
6062 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6064 IEM_MC_LOCAL(uint64_t, u64Tmp);
6065
6066 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6067 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6068 IEM_MC_FPU_TO_MMX_MODE();
6069
6070 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6071 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6072
6073 IEM_MC_ADVANCE_RIP_AND_FINISH();
6074 IEM_MC_END();
6075 }
6076 else
6077 {
6078 /* MMX, [mem64] */
6079 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6081 IEM_MC_LOCAL(uint64_t, u64Tmp);
6082
6083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6085 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6086 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6087
6088 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6089 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6090 IEM_MC_FPU_TO_MMX_MODE();
6091
6092 IEM_MC_ADVANCE_RIP_AND_FINISH();
6093 IEM_MC_END();
6094 }
6095 }
6096 else
6097 {
6098 /**
6099 * @opdone
6100 * @opcode 0x6e
6101 * @opcodesub rex.w=0
6102 * @oppfx none
6103 * @opcpuid mmx
6104 * @opgroup og_mmx_datamove
6105 * @opxcpttype 5
6106 * @opfunction iemOp_movd_q_Pd_Ey
6107 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6108 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6109 */
6110 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6111 if (IEM_IS_MODRM_REG_MODE(bRm))
6112 {
6113 /* MMX, greg32 */
6114 IEM_MC_BEGIN(0, 0);
6115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6116 IEM_MC_LOCAL(uint32_t, u32Tmp);
6117
6118 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6119 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6120 IEM_MC_FPU_TO_MMX_MODE();
6121
6122 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6123 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6124
6125 IEM_MC_ADVANCE_RIP_AND_FINISH();
6126 IEM_MC_END();
6127 }
6128 else
6129 {
6130 /* MMX, [mem32] */
6131 IEM_MC_BEGIN(0, 0);
6132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6133 IEM_MC_LOCAL(uint32_t, u32Tmp);
6134
6135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6137 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6138 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6139
6140 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6141 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6142 IEM_MC_FPU_TO_MMX_MODE();
6143
6144 IEM_MC_ADVANCE_RIP_AND_FINISH();
6145 IEM_MC_END();
6146 }
6147 }
6148}
6149
6150FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6151{
6152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6153 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6154 {
6155 /**
6156 * @opcode 0x6e
6157 * @opcodesub rex.w=1
6158 * @oppfx 0x66
6159 * @opcpuid sse2
6160 * @opgroup og_sse2_simdint_datamove
6161 * @opxcpttype 5
6162 * @optest 64-bit / op1=1 op2=2 -> op1=2
6163 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6164 */
6165 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6166 if (IEM_IS_MODRM_REG_MODE(bRm))
6167 {
6168 /* XMM, greg64 */
6169 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6171 IEM_MC_LOCAL(uint64_t, u64Tmp);
6172
6173 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6174 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6175
6176 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6177 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6178
6179 IEM_MC_ADVANCE_RIP_AND_FINISH();
6180 IEM_MC_END();
6181 }
6182 else
6183 {
6184 /* XMM, [mem64] */
6185 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6187 IEM_MC_LOCAL(uint64_t, u64Tmp);
6188
6189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6191 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6192 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6193
6194 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6195 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6196
6197 IEM_MC_ADVANCE_RIP_AND_FINISH();
6198 IEM_MC_END();
6199 }
6200 }
6201 else
6202 {
6203 /**
6204 * @opdone
6205 * @opcode 0x6e
6206 * @opcodesub rex.w=0
6207 * @oppfx 0x66
6208 * @opcpuid sse2
6209 * @opgroup og_sse2_simdint_datamove
6210 * @opxcpttype 5
6211 * @opfunction iemOp_movd_q_Vy_Ey
6212 * @optest op1=1 op2=2 -> op1=2
6213 * @optest op1=0 op2=-42 -> op1=-42
6214 */
6215 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6216 if (IEM_IS_MODRM_REG_MODE(bRm))
6217 {
6218 /* XMM, greg32 */
6219 IEM_MC_BEGIN(0, 0);
6220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6221 IEM_MC_LOCAL(uint32_t, u32Tmp);
6222
6223 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6224 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6225
6226 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6227 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6228
6229 IEM_MC_ADVANCE_RIP_AND_FINISH();
6230 IEM_MC_END();
6231 }
6232 else
6233 {
6234 /* XMM, [mem32] */
6235 IEM_MC_BEGIN(0, 0);
6236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6237 IEM_MC_LOCAL(uint32_t, u32Tmp);
6238
6239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6243
6244 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6245 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6246
6247 IEM_MC_ADVANCE_RIP_AND_FINISH();
6248 IEM_MC_END();
6249 }
6250 }
6251}
6252
6253/* Opcode 0xf3 0x0f 0x6e - invalid */
6254
6255
6256/**
6257 * @opcode 0x6f
6258 * @oppfx none
6259 * @opcpuid mmx
6260 * @opgroup og_mmx_datamove
6261 * @opxcpttype 5
6262 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6263 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6264 */
6265FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6266{
6267 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6269 if (IEM_IS_MODRM_REG_MODE(bRm))
6270 {
6271 /*
6272 * Register, register.
6273 */
6274 IEM_MC_BEGIN(0, 0);
6275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6276 IEM_MC_LOCAL(uint64_t, u64Tmp);
6277
6278 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6279 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6280 IEM_MC_FPU_TO_MMX_MODE();
6281
6282 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6283 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6284
6285 IEM_MC_ADVANCE_RIP_AND_FINISH();
6286 IEM_MC_END();
6287 }
6288 else
6289 {
6290 /*
6291 * Register, memory.
6292 */
6293 IEM_MC_BEGIN(0, 0);
6294 IEM_MC_LOCAL(uint64_t, u64Tmp);
6295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6296
6297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6299 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6300 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6301
6302 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6303 IEM_MC_FPU_TO_MMX_MODE();
6304
6305 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6306
6307 IEM_MC_ADVANCE_RIP_AND_FINISH();
6308 IEM_MC_END();
6309 }
6310}
6311
6312/**
6313 * @opcode 0x6f
6314 * @oppfx 0x66
6315 * @opcpuid sse2
6316 * @opgroup og_sse2_simdint_datamove
6317 * @opxcpttype 1
6318 * @optest op1=1 op2=2 -> op1=2
6319 * @optest op1=0 op2=-42 -> op1=-42
6320 */
6321FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6322{
6323 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6325 if (IEM_IS_MODRM_REG_MODE(bRm))
6326 {
6327 /*
6328 * Register, register.
6329 */
6330 IEM_MC_BEGIN(0, 0);
6331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6332
6333 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6334 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6335
6336 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6337 IEM_GET_MODRM_RM(pVCpu, bRm));
6338 IEM_MC_ADVANCE_RIP_AND_FINISH();
6339 IEM_MC_END();
6340 }
6341 else
6342 {
6343 /*
6344 * Register, memory.
6345 */
6346 IEM_MC_BEGIN(0, 0);
6347 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6349
6350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6352 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6353 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6354
6355 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6356 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6357
6358 IEM_MC_ADVANCE_RIP_AND_FINISH();
6359 IEM_MC_END();
6360 }
6361}
6362
6363/**
6364 * @opcode 0x6f
6365 * @oppfx 0xf3
6366 * @opcpuid sse2
6367 * @opgroup og_sse2_simdint_datamove
6368 * @opxcpttype 4UA
6369 * @optest op1=1 op2=2 -> op1=2
6370 * @optest op1=0 op2=-42 -> op1=-42
6371 */
6372FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6373{
6374 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6376 if (IEM_IS_MODRM_REG_MODE(bRm))
6377 {
6378 /*
6379 * Register, register.
6380 */
6381 IEM_MC_BEGIN(0, 0);
6382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6383 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6384 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6385 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6386 IEM_GET_MODRM_RM(pVCpu, bRm));
6387 IEM_MC_ADVANCE_RIP_AND_FINISH();
6388 IEM_MC_END();
6389 }
6390 else
6391 {
6392 /*
6393 * Register, memory.
6394 */
6395 IEM_MC_BEGIN(0, 0);
6396 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6398
6399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6401 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6402 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6403 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6404 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6405
6406 IEM_MC_ADVANCE_RIP_AND_FINISH();
6407 IEM_MC_END();
6408 }
6409}
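/*
 * Illustrative sketch (not part of the emulation; helper name hypothetical):
 * the difference between the movdqa and movdqu paths above is the memory
 * fetch: IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces 16-byte alignment (\#GP on
 * a misaligned operand) while IEM_MC_FETCH_MEM_U128_NO_AC does not.
 */
#if 0 /* standalone illustration only */
# include <stdint.h>
# include <stdbool.h>
static bool illustrateIsSseAligned(uint64_t GCPtrMem)
{
    return (GCPtrMem & 15) == 0; /* movdqa requires this; movdqu does not */
}
#endif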
6410
6411
6412/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6413FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6414{
6415 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6417 if (IEM_IS_MODRM_REG_MODE(bRm))
6418 {
6419 /*
6420 * Register, register.
6421 */
6422 IEM_MC_BEGIN(0, 0);
6423 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6425 IEM_MC_ARG(uint64_t *, pDst, 0);
6426 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6427 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6428 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6429 IEM_MC_PREPARE_FPU_USAGE();
6430 IEM_MC_FPU_TO_MMX_MODE();
6431
6432 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6433 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6434 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6435 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6436
6437 IEM_MC_ADVANCE_RIP_AND_FINISH();
6438 IEM_MC_END();
6439 }
6440 else
6441 {
6442 /*
6443 * Register, memory.
6444 */
6445 IEM_MC_BEGIN(0, 0);
6446 IEM_MC_ARG(uint64_t *, pDst, 0);
6447 IEM_MC_LOCAL(uint64_t, uSrc);
6448 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6450
6451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6452 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6453 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6455 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6456 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6457
6458 IEM_MC_PREPARE_FPU_USAGE();
6459 IEM_MC_FPU_TO_MMX_MODE();
6460
6461 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6462 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6463 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6464
6465 IEM_MC_ADVANCE_RIP_AND_FINISH();
6466 IEM_MC_END();
6467 }
6468}
6469
6470
6471/**
6472 * Common worker for SSE2 instructions of the forms:
6473 * pshufd xmm1, xmm2/mem128, imm8
6474 * pshufhw xmm1, xmm2/mem128, imm8
6475 * pshuflw xmm1, xmm2/mem128, imm8
6476 *
6477 * Proper alignment of the 128-bit operand is enforced.
6478 * Exceptions type 4. SSE2 cpuid checks.
6479 */
6480FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6481{
6482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6483 if (IEM_IS_MODRM_REG_MODE(bRm))
6484 {
6485 /*
6486 * Register, register.
6487 */
6488 IEM_MC_BEGIN(0, 0);
6489 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6491 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6492 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6493 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6494 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6495 IEM_MC_PREPARE_SSE_USAGE();
6496 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6497 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6498 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6499 IEM_MC_ADVANCE_RIP_AND_FINISH();
6500 IEM_MC_END();
6501 }
6502 else
6503 {
6504 /*
6505 * Register, memory.
6506 */
6507 IEM_MC_BEGIN(0, 0);
6508 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6509 IEM_MC_LOCAL(RTUINT128U, uSrc);
6510 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6512
6513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6514 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6515 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6517 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6518
6519 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6520 IEM_MC_PREPARE_SSE_USAGE();
6521 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6522 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6523
6524 IEM_MC_ADVANCE_RIP_AND_FINISH();
6525 IEM_MC_END();
6526 }
6527}
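/*
 * Illustrative sketch (not part of the emulation; helper name hypothetical):
 * for pshufd each 2-bit field of the immediate selects which source dword
 * lands in the corresponding destination dword; pshufhw/pshuflw apply the
 * same scheme to the high/low four words only.
 */
#if 0 /* standalone illustration only */
# include <stdint.h>
static void illustratePshufd(uint32_t auDst[4], uint32_t const auSrc[4], uint8_t bImm)
{
    /* auDst must not overlap auSrc in this sketch. */
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = auSrc[(bImm >> (i * 2)) & 3];
}
#endif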
6528
6529
6530/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6531FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6532{
6533 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6534 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6535}
6536
6537
6538/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6539FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6540{
6541 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6542 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6543}
6544
6545
6546/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6547FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6548{
6549 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6550 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6551}
6552
6553
6554/**
6555 * Common worker for MMX instructions of the form:
6556 * psrlw mm, imm8
6557 * psraw mm, imm8
6558 * psllw mm, imm8
6559 * psrld mm, imm8
6560 * psrad mm, imm8
6561 * pslld mm, imm8
6562 * psrlq mm, imm8
6563 * psllq mm, imm8
6564 *
6565 */
6566FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6567{
6568 if (IEM_IS_MODRM_REG_MODE(bRm))
6569 {
6570 /*
6571 * Register, immediate.
6572 */
6573 IEM_MC_BEGIN(0, 0);
6574 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6576 IEM_MC_ARG(uint64_t *, pDst, 0);
6577 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6578 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6579 IEM_MC_PREPARE_FPU_USAGE();
6580 IEM_MC_FPU_TO_MMX_MODE();
6581
6582 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6583 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6584 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6585
6586 IEM_MC_ADVANCE_RIP_AND_FINISH();
6587 IEM_MC_END();
6588 }
6589 else
6590 {
6591 /*
6592 * Register, memory not supported.
6593 */
6594 /// @todo Caller already enforced register mode?!
6595 AssertFailedReturn(VINF_SUCCESS);
6596 }
6597}
6598
6599
6600/**
6601 * Common worker for SSE2 instructions of the form:
6602 * psrlw xmm, imm8
6603 * psraw xmm, imm8
6604 * psllw xmm, imm8
6605 * psrld xmm, imm8
6606 * psrad xmm, imm8
6607 * pslld xmm, imm8
6608 * psrlq xmm, imm8
6609 * psllq xmm, imm8
6610 *
6611 */
6612FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6613{
6614 if (IEM_IS_MODRM_REG_MODE(bRm))
6615 {
6616 /*
6617 * Register, immediate.
6618 */
6619 IEM_MC_BEGIN(0, 0);
6620 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6622 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6623 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6624 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6625 IEM_MC_PREPARE_SSE_USAGE();
6626 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6627 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6628 IEM_MC_ADVANCE_RIP_AND_FINISH();
6629 IEM_MC_END();
6630 }
6631 else
6632 {
6633 /*
6634 * Register, memory not supported.
6635 */
6636 /// @todo Caller already enforced register mode?!
6637 AssertFailedReturn(VINF_SUCCESS);
6638 }
6639}
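/*
 * Illustrative sketch (not part of the emulation; helper name hypothetical):
 * for the two shift-by-immediate workers above, the logical shifts zero an
 * element once the count reaches the element width, whereas the arithmetic
 * shifts (psraw/psrad) behave as if the count were clamped to width - 1.
 */
#if 0 /* standalone illustration only */
# include <stdint.h>
static uint16_t illustratePsrlwElement(uint16_t u16Elem, uint8_t bShift)
{
    return bShift < 16 ? (uint16_t)(u16Elem >> bShift) : 0;
}
#endif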
6640
6641
6642/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6643FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6644{
6645// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6646 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6647}
6648
6649
6650/** Opcode 0x66 0x0f 0x71 11/2. */
6651FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6652{
6653// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6654 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6655}
6656
6657
6658/** Opcode 0x0f 0x71 11/4. */
6659FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6660{
6661// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6662 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6663}
6664
6665
6666/** Opcode 0x66 0x0f 0x71 11/4. */
6667FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6668{
6669// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6670 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6671}
6672
6673
6674/** Opcode 0x0f 0x71 11/6. */
6675FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6676{
6677// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6678 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6679}
6680
6681
6682/** Opcode 0x66 0x0f 0x71 11/6. */
6683FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6684{
6685// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6686 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6687}
6688
6689
6690/**
6691 * Group 12 jump table for register variant.
6692 */
6693IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6694{
6695 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6696 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6697 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6698 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6699 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6700 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6701 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6702 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6703};
6704AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6705
6706
6707/** Opcode 0x0f 0x71. */
6708FNIEMOP_DEF(iemOp_Grp12)
6709{
6710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6711 if (IEM_IS_MODRM_REG_MODE(bRm))
6712 /* register, register */
6713 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6714 + pVCpu->iem.s.idxPrefix], bRm);
6715 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6716}
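/*
 * Illustrative sketch (not part of the emulation; helper name hypothetical):
 * the group jump tables above are laid out as eight rows (ModR/M /0../7) of
 * four prefix columns (none, 0x66, 0xf3, 0xf2, going by the row comments and
 * iem.s.idxPrefix), hence the "reg * 4 + idxPrefix" dispatch index.
 */
#if 0 /* standalone illustration only */
static unsigned illustrateGroupTableIndex(unsigned iModRmReg, unsigned idxPrefix)
{
    return iModRmReg * 4 + idxPrefix; /* 0 <= iModRmReg < 8, 0 <= idxPrefix < 4 */
}
#endif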
6717
6718
6719/** Opcode 0x0f 0x72 11/2. */
6720FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6721{
6722// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6723 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6724}
6725
6726
6727/** Opcode 0x66 0x0f 0x72 11/2. */
6728FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6729{
6730// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6731 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6732}
6733
6734
6735/** Opcode 0x0f 0x72 11/4. */
6736FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6737{
6738// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6739 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6740}
6741
6742
6743/** Opcode 0x66 0x0f 0x72 11/4. */
6744FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6745{
6746// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6747 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6748}
6749
6750
6751/** Opcode 0x0f 0x72 11/6. */
6752FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6753{
6754// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6755 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6756}
6757
6758/** Opcode 0x66 0x0f 0x72 11/6. */
6759FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6760{
6761// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6762 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6763}
6764
6765
6766/**
6767 * Group 13 jump table for register variant.
6768 */
6769IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6770{
6771 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6772 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6773 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6774 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6775 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6776 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6777 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6778 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6779};
6780AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6781
6782/** Opcode 0x0f 0x72. */
6783FNIEMOP_DEF(iemOp_Grp13)
6784{
6785 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6786 if (IEM_IS_MODRM_REG_MODE(bRm))
6787 /* register, register */
6788 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6789 + pVCpu->iem.s.idxPrefix], bRm);
6790 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6791}
6792
6793
6794/** Opcode 0x0f 0x73 11/2. */
6795FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6796{
6797// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6798 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6799}
6800
6801
6802/** Opcode 0x66 0x0f 0x73 11/2. */
6803FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6804{
6805// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6806 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6807}
6808
6809
6810/** Opcode 0x66 0x0f 0x73 11/3. */
6811FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6812{
6813// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6814 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6815}
6816
6817
6818/** Opcode 0x0f 0x73 11/6. */
6819FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6820{
6821// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6822 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6823}
6824
6825
6826/** Opcode 0x66 0x0f 0x73 11/6. */
6827FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6828{
6829// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6830 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6831}
6832
6833
6834/** Opcode 0x66 0x0f 0x73 11/7. */
6835FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6836{
6837// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6838 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6839}
6840
6841/**
6842 * Group 14 jump table for register variant.
6843 */
6844IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6845{
6846 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6847 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6848 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6849 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6850 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6851 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6852 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6853 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6854};
6855AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6856
6857
6858/** Opcode 0x0f 0x73. */
6859FNIEMOP_DEF(iemOp_Grp14)
6860{
6861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6862 if (IEM_IS_MODRM_REG_MODE(bRm))
6863 /* register, register */
6864 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6865 + pVCpu->iem.s.idxPrefix], bRm);
6866 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6867}
6868
6869
6870/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6871FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6872{
6873 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6874 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6875}
6876
6877
6878/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6879FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6880{
6881 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6882 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
6883}
6884
6885
6886/* Opcode 0xf3 0x0f 0x74 - invalid */
6887/* Opcode 0xf2 0x0f 0x74 - invalid */
6888
6889
6890/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6891FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6892{
6893 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6894 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6895}
6896
6897
6898/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6899FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6900{
6901 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6902 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
6903}
6904
6905
6906/* Opcode 0xf3 0x0f 0x75 - invalid */
6907/* Opcode 0xf2 0x0f 0x75 - invalid */
6908
6909
6910/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6911FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6912{
6913 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6914 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6915}
6916
6917
6918/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6919FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6920{
6921 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6922 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
6923}
6924
6925
6926/* Opcode 0xf3 0x0f 0x76 - invalid */
6927/* Opcode 0xf2 0x0f 0x76 - invalid */
6928
6929
6930/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6931FNIEMOP_DEF(iemOp_emms)
6932{
6933 IEMOP_MNEMONIC(emms, "emms");
6934 IEM_MC_BEGIN(0, 0);
6935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6936 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6937 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6938 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6939 IEM_MC_FPU_FROM_MMX_MODE();
6940 IEM_MC_ADVANCE_RIP_AND_FINISH();
6941 IEM_MC_END();
6942}
6943
6944/* Opcode 0x66 0x0f 0x77 - invalid */
6945/* Opcode 0xf3 0x0f 0x77 - invalid */
6946/* Opcode 0xf2 0x0f 0x77 - invalid */
6947
6948/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6949#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6950FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6951{
6952 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6953 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6954 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6955 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6956
6957 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6958 if (IEM_IS_MODRM_REG_MODE(bRm))
6959 {
6960 /*
6961 * Register, register.
6962 */
6963 if (enmEffOpSize == IEMMODE_64BIT)
6964 {
6965 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6966 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6967 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6968 IEM_MC_ARG(uint64_t, u64Enc, 1);
6969 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6970 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6971 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6972 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6973 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6974 IEM_MC_END();
6975 }
6976 else
6977 {
6978 IEM_MC_BEGIN(0, 0);
6979 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6980 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6981 IEM_MC_ARG(uint32_t, u32Enc, 1);
6982 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6983 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6984 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6985 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6986 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6987 IEM_MC_END();
6988 }
6989 }
6990 else
6991 {
6992 /*
6993 * Memory, register.
6994 */
6995 if (enmEffOpSize == IEMMODE_64BIT)
6996 {
6997 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6998 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7000 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7001 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7002 IEM_MC_ARG(uint64_t, u64Enc, 2);
7003 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7004 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7005 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7006 IEM_MC_END();
7007 }
7008 else
7009 {
7010 IEM_MC_BEGIN(0, 0);
7011 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7013 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7014 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7015 IEM_MC_ARG(uint32_t, u32Enc, 2);
7016 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7017 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7018 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7019 IEM_MC_END();
7020 }
7021 }
7022}
7023#else
7024FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
7025#endif
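
/*
 * Decoding note for VMREAD: it reverses the usual ModRM direction - the
 * VMCS field encoding is taken from ModRM.reg (Gy) and the field value is
 * written to ModRM.rm (Ey), register or memory. The operand size is fixed
 * by the CPU mode (64-bit in long mode, 32-bit otherwise), not by prefixes,
 * which is why enmEffOpSize comes from IEM_IS_64BIT_CODE() above.
 * Guest-side sketch:
 *
 *      mov     rax, 0x681e     ; VMCS field encoding (guest RIP)
 *      vmread  rbx, rax        ; rbx := VMCS[0x681e]
 */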
7026
7027/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7028FNIEMOP_STUB(iemOp_AmdGrp17);
7029/* Opcode 0xf3 0x0f 0x78 - invalid */
7030/* Opcode 0xf2 0x0f 0x78 - invalid */
7031
7032/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7033#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7034FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7035{
7036 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7037 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7038 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7039 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7040
7041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7042 if (IEM_IS_MODRM_REG_MODE(bRm))
7043 {
7044 /*
7045 * Register, register.
7046 */
7047 if (enmEffOpSize == IEMMODE_64BIT)
7048 {
7049 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7050 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7051 IEM_MC_ARG(uint64_t, u64Val, 0);
7052 IEM_MC_ARG(uint64_t, u64Enc, 1);
7053 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7054 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7055 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7056 IEM_MC_END();
7057 }
7058 else
7059 {
7060 IEM_MC_BEGIN(0, 0);
7061 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7062 IEM_MC_ARG(uint32_t, u32Val, 0);
7063 IEM_MC_ARG(uint32_t, u32Enc, 1);
7064 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7065 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7066 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7067 IEM_MC_END();
7068 }
7069 }
7070 else
7071 {
7072 /*
7073 * Register, memory.
7074 */
7075 if (enmEffOpSize == IEMMODE_64BIT)
7076 {
7077 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7078 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7080 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7081 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7082 IEM_MC_ARG(uint64_t, u64Enc, 2);
7083 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7084 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7085 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7086 IEM_MC_END();
7087 }
7088 else
7089 {
7090 IEM_MC_BEGIN(0, 0);
7091 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7093 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7094                 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7095                 IEM_MC_ARG(uint32_t, u32Enc, 2);
7096 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7097 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7098 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7099 IEM_MC_END();
7100 }
7101 }
7102}
7103#else
7104FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
7105#endif
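
/*
 * VMWRITE mirrors VMREAD: the field encoding again travels in ModRM.reg
 * (Gy), while the value to store comes from ModRM.rm (Ey). Sketch:
 *
 *      mov     rax, 0x681e     ; VMCS field encoding (guest RIP)
 *      vmwrite rax, rbx        ; VMCS[0x681e] := rbx
 */
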
7106/* Opcode 0x66 0x0f 0x79 - invalid */
7107/* Opcode 0xf3 0x0f 0x79 - invalid */
7108/* Opcode 0xf2 0x0f 0x79 - invalid */
7109
7110/* Opcode 0x0f 0x7a - invalid */
7111/* Opcode 0x66 0x0f 0x7a - invalid */
7112/* Opcode 0xf3 0x0f 0x7a - invalid */
7113/* Opcode 0xf2 0x0f 0x7a - invalid */
7114
7115/* Opcode 0x0f 0x7b - invalid */
7116/* Opcode 0x66 0x0f 0x7b - invalid */
7117/* Opcode 0xf3 0x0f 0x7b - invalid */
7118/* Opcode 0xf2 0x0f 0x7b - invalid */
7119
7120/* Opcode 0x0f 0x7c - invalid */
7121
7122
7123/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7124FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7125{
7126 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7127 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7128}
7129
7130
7131/* Opcode 0xf3 0x0f 0x7c - invalid */
7132
7133
7134/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7135FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7136{
7137 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7138 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7139}
7140
7141
7142/* Opcode 0x0f 0x7d - invalid */
7143
7144
7145/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7146FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7147{
7148 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7149 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7150}
7151
7152
7153/* Opcode 0xf3 0x0f 0x7d - invalid */
7154
7155
7156/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7157FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7158{
7159 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7160 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7161}
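
/*
 * Lane layout for the four SSE3 horizontal ops above (what the
 * iemAImpl_h{add,sub}p{s,d}_u128 helpers compute, sketched):
 *
 *      haddps: dst[0]=src1[0]+src1[1]  dst[1]=src1[2]+src1[3]
 *              dst[2]=src2[0]+src2[1]  dst[3]=src2[2]+src2[3]
 *      hsubps: same slots with subtraction, always even lane minus odd lane
 *      haddpd: dst[0]=src1[0]+src1[1]  dst[1]=src2[0]+src2[1]
 *      hsubpd: dst[0]=src1[0]-src1[1]  dst[1]=src2[0]-src2[1]
 *
 * Being real floating-point operations they can raise SIMD FP exceptions,
 * hence the iemOpCommonSse3Fp_FullFull_To_Full worker rather than the
 * integer-only Opt variant used by PCMPEQ earlier.
 */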
7162
7163
7164/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7165FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7166{
7167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7168 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7169 {
7170 /**
7171 * @opcode 0x7e
7172 * @opcodesub rex.w=1
7173 * @oppfx none
7174 * @opcpuid mmx
7175 * @opgroup og_mmx_datamove
7176 * @opxcpttype 5
7177 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7178 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7179 */
7180 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7181 if (IEM_IS_MODRM_REG_MODE(bRm))
7182 {
7183 /* greg64, MMX */
7184 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7186 IEM_MC_LOCAL(uint64_t, u64Tmp);
7187
7188 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7189 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7190 IEM_MC_FPU_TO_MMX_MODE();
7191
7192 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7193 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7194
7195 IEM_MC_ADVANCE_RIP_AND_FINISH();
7196 IEM_MC_END();
7197 }
7198 else
7199 {
7200 /* [mem64], MMX */
7201 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7203 IEM_MC_LOCAL(uint64_t, u64Tmp);
7204
7205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7207 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7208 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7209
7210 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7211 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7212 IEM_MC_FPU_TO_MMX_MODE();
7213
7214 IEM_MC_ADVANCE_RIP_AND_FINISH();
7215 IEM_MC_END();
7216 }
7217 }
7218 else
7219 {
7220 /**
7221 * @opdone
7222 * @opcode 0x7e
7223 * @opcodesub rex.w=0
7224 * @oppfx none
7225 * @opcpuid mmx
7226 * @opgroup og_mmx_datamove
7227 * @opxcpttype 5
7228  * @opfunction iemOp_movd_q_Ey_Pd
7229 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7230 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7231 */
7232 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7233 if (IEM_IS_MODRM_REG_MODE(bRm))
7234 {
7235 /* greg32, MMX */
7236 IEM_MC_BEGIN(0, 0);
7237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7238 IEM_MC_LOCAL(uint32_t, u32Tmp);
7239
7240 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7241 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7242 IEM_MC_FPU_TO_MMX_MODE();
7243
7244 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7245 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7246
7247 IEM_MC_ADVANCE_RIP_AND_FINISH();
7248 IEM_MC_END();
7249 }
7250 else
7251 {
7252 /* [mem32], MMX */
7253 IEM_MC_BEGIN(0, 0);
7254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7255 IEM_MC_LOCAL(uint32_t, u32Tmp);
7256
7257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7259 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7260 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7261
7262 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7263 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7264 IEM_MC_FPU_TO_MMX_MODE();
7265
7266 IEM_MC_ADVANCE_RIP_AND_FINISH();
7267 IEM_MC_END();
7268 }
7269 }
7270}
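
/*
 * The REX.W test above is how a single opcode byte (0F 7E) yields two
 * instructions: with REX.W it is MOVQ, moving the whole 64-bit MMX register
 * to r/m64; without it, MOVD, moving the low 32 bits to r/m32:
 *
 *      movd    eax, mm0        ;       0F 7E /r
 *      movq    rax, mm0        ; REX.W 0F 7E /r
 *
 * Both leave the FPU in MMX mode, which the ftw=0xff expectations in the
 * @optest lines verify (all x87 tags flagged as valid).
 */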
7271
7272
7273FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7274{
7275 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7276 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7277 {
7278 /**
7279 * @opcode 0x7e
7280 * @opcodesub rex.w=1
7281 * @oppfx 0x66
7282 * @opcpuid sse2
7283 * @opgroup og_sse2_simdint_datamove
7284 * @opxcpttype 5
7285 * @optest 64-bit / op1=1 op2=2 -> op1=2
7286 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7287 */
7288 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7289 if (IEM_IS_MODRM_REG_MODE(bRm))
7290 {
7291 /* greg64, XMM */
7292 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7294 IEM_MC_LOCAL(uint64_t, u64Tmp);
7295
7296 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7297 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7298
7299 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7300 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7301
7302 IEM_MC_ADVANCE_RIP_AND_FINISH();
7303 IEM_MC_END();
7304 }
7305 else
7306 {
7307 /* [mem64], XMM */
7308 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7310 IEM_MC_LOCAL(uint64_t, u64Tmp);
7311
7312 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7314 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7315 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7316
7317 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7318 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7319
7320 IEM_MC_ADVANCE_RIP_AND_FINISH();
7321 IEM_MC_END();
7322 }
7323 }
7324 else
7325 {
7326 /**
7327 * @opdone
7328 * @opcode 0x7e
7329 * @opcodesub rex.w=0
7330 * @oppfx 0x66
7331 * @opcpuid sse2
7332 * @opgroup og_sse2_simdint_datamove
7333 * @opxcpttype 5
7334  * @opfunction iemOp_movd_q_Ey_Vy
7335 * @optest op1=1 op2=2 -> op1=2
7336 * @optest op1=0 op2=-42 -> op1=-42
7337 */
7338 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7339 if (IEM_IS_MODRM_REG_MODE(bRm))
7340 {
7341 /* greg32, XMM */
7342 IEM_MC_BEGIN(0, 0);
7343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7344 IEM_MC_LOCAL(uint32_t, u32Tmp);
7345
7346 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7348
7349 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7350 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7351
7352 IEM_MC_ADVANCE_RIP_AND_FINISH();
7353 IEM_MC_END();
7354 }
7355 else
7356 {
7357 /* [mem32], XMM */
7358 IEM_MC_BEGIN(0, 0);
7359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7360 IEM_MC_LOCAL(uint32_t, u32Tmp);
7361
7362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7364 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7365 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7366
7367 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7368 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7369
7370 IEM_MC_ADVANCE_RIP_AND_FINISH();
7371 IEM_MC_END();
7372 }
7373 }
7374}
7375
7376/**
7377 * @opcode 0x7e
7378 * @oppfx 0xf3
7379 * @opcpuid sse2
7380 * @opgroup og_sse2_pcksclr_datamove
7381 * @opxcpttype none
7382 * @optest op1=1 op2=2 -> op1=2
7383 * @optest op1=0 op2=-42 -> op1=-42
7384 */
7385FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7386{
7387 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7389 if (IEM_IS_MODRM_REG_MODE(bRm))
7390 {
7391 /*
7392 * XMM128, XMM64.
7393 */
7394 IEM_MC_BEGIN(0, 0);
7395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7396 IEM_MC_LOCAL(uint64_t, uSrc);
7397
7398 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7399 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7400
7401 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7402 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7403
7404 IEM_MC_ADVANCE_RIP_AND_FINISH();
7405 IEM_MC_END();
7406 }
7407 else
7408 {
7409 /*
7410 * XMM128, [mem64].
7411 */
7412 IEM_MC_BEGIN(0, 0);
7413 IEM_MC_LOCAL(uint64_t, uSrc);
7414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7415
7416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7418 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7419 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7420
7421 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7422 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7423
7424 IEM_MC_ADVANCE_RIP_AND_FINISH();
7425 IEM_MC_END();
7426 }
7427}
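
/*
 * The _ZX_ in IEM_MC_STORE_XREG_U64_ZX_U128 is the defining quirk of this
 * encoding (F3 0F 7E): the low quadword is copied and bits 127:64 of the
 * destination are zeroed, unlike MOVLPS/MOVLPD which leave the upper half
 * untouched. Roughly:
 *
 *      uDst.au64[0] = uSrc;
 *      uDst.au64[1] = 0;
 */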
7428
7429/* Opcode 0xf2 0x0f 0x7e - invalid */
7430
7431
7432/** Opcode 0x0f 0x7f - movq Qq, Pq */
7433FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7434{
7435 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7437 if (IEM_IS_MODRM_REG_MODE(bRm))
7438 {
7439 /*
7440 * MMX, MMX.
7441 */
7442 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7443 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7444 IEM_MC_BEGIN(0, 0);
7445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7446 IEM_MC_LOCAL(uint64_t, u64Tmp);
7447 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7448 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7449 IEM_MC_FPU_TO_MMX_MODE();
7450
7451 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7452 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7453
7454 IEM_MC_ADVANCE_RIP_AND_FINISH();
7455 IEM_MC_END();
7456 }
7457 else
7458 {
7459 /*
7460 * [mem64], MMX.
7461 */
7462 IEM_MC_BEGIN(0, 0);
7463 IEM_MC_LOCAL(uint64_t, u64Tmp);
7464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7465
7466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7468 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7469 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7470
7471 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7472 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7473 IEM_MC_FPU_TO_MMX_MODE();
7474
7475 IEM_MC_ADVANCE_RIP_AND_FINISH();
7476 IEM_MC_END();
7477 }
7478}
7479
7480/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7481FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7482{
7483 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7485 if (IEM_IS_MODRM_REG_MODE(bRm))
7486 {
7487 /*
7488 * XMM, XMM.
7489 */
7490 IEM_MC_BEGIN(0, 0);
7491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7492 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7493 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7494 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7495 IEM_GET_MODRM_REG(pVCpu, bRm));
7496 IEM_MC_ADVANCE_RIP_AND_FINISH();
7497 IEM_MC_END();
7498 }
7499 else
7500 {
7501 /*
7502 * [mem128], XMM.
7503 */
7504 IEM_MC_BEGIN(0, 0);
7505 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7507
7508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7510 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7511 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7512
7513 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7514 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7515
7516 IEM_MC_ADVANCE_RIP_AND_FINISH();
7517 IEM_MC_END();
7518 }
7519}
7520
7521/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7522FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7523{
7524 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7526 if (IEM_IS_MODRM_REG_MODE(bRm))
7527 {
7528 /*
7529 * XMM, XMM.
7530 */
7531 IEM_MC_BEGIN(0, 0);
7532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7533 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7535 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7536 IEM_GET_MODRM_REG(pVCpu, bRm));
7537 IEM_MC_ADVANCE_RIP_AND_FINISH();
7538 IEM_MC_END();
7539 }
7540 else
7541 {
7542 /*
7543 * [mem128], XMM.
7544 */
7545 IEM_MC_BEGIN(0, 0);
7546 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7548
7549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7551 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7552 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7553
7554 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7555 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7556
7557 IEM_MC_ADVANCE_RIP_AND_FINISH();
7558 IEM_MC_END();
7559 }
7560}
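
/*
 * The only behavioural difference between the two stores above is alignment
 * handling, visible in the macro names: IEM_MC_STORE_MEM_U128_ALIGN_SSE
 * (movdqa) faults with #GP(0) on a misaligned 16-byte access, whereas
 * IEM_MC_STORE_MEM_U128_NO_AC (movdqu) takes any address and skips #AC
 * alignment checking. The register-to-register forms are identical; both
 * are a plain IEM_MC_COPY_XREG_U128.
 */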
7561
7562/* Opcode 0xf2 0x0f 0x7f - invalid */
7563
7564
7565/**
7566 * @opcode 0x80
7567 * @opfltest of
7568 */
7569FNIEMOP_DEF(iemOp_jo_Jv)
7570{
7571 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7572 IEMOP_HLP_MIN_386();
7573 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7574 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7575 {
7576 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7577 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7579 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7580 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7581 } IEM_MC_ELSE() {
7582 IEM_MC_ADVANCE_RIP_AND_FINISH();
7583 } IEM_MC_ENDIF();
7584 IEM_MC_END();
7585 }
7586 else
7587 {
7588 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7589 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7591 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7592 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7593 } IEM_MC_ELSE() {
7594 IEM_MC_ADVANCE_RIP_AND_FINISH();
7595 } IEM_MC_ENDIF();
7596 IEM_MC_END();
7597 }
7598}
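
/*
 * The fifteen remaining 0F 80..8F Jcc handlers below all repeat the shape
 * established here: fetch a signed 16- or 32-bit displacement matching the
 * effective operand size, test EFLAGS, and either take the relative jump or
 * fall through. Condensed (illustration only):
 *
 *      if (condition(EFLAGS))
 *          rip += cbInstr + imm;   // IEM_MC_REL_JMP_Sxx_AND_FINISH
 *      else
 *          rip += cbInstr;         // IEM_MC_ADVANCE_RIP_AND_FINISH
 *
 * In 64-bit mode the operand size defaults to 64-bit and Intel ignores the
 * 0x66 prefix, per IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX.
 */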
7599
7600
7601/**
7602 * @opcode 0x81
7603 * @opfltest of
7604 */
7605FNIEMOP_DEF(iemOp_jno_Jv)
7606{
7607 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7608 IEMOP_HLP_MIN_386();
7609 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7610 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7611 {
7612 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7613 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7616 IEM_MC_ADVANCE_RIP_AND_FINISH();
7617 } IEM_MC_ELSE() {
7618 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7619 } IEM_MC_ENDIF();
7620 IEM_MC_END();
7621 }
7622 else
7623 {
7624 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7625 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7627 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7628 IEM_MC_ADVANCE_RIP_AND_FINISH();
7629 } IEM_MC_ELSE() {
7630 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7631 } IEM_MC_ENDIF();
7632 IEM_MC_END();
7633 }
7634}
7635
7636
7637/**
7638 * @opcode 0x82
7639 * @opfltest cf
7640 */
7641FNIEMOP_DEF(iemOp_jc_Jv)
7642{
7643 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7644 IEMOP_HLP_MIN_386();
7645 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7646 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7647 {
7648 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7649 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7651 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7652 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7653 } IEM_MC_ELSE() {
7654 IEM_MC_ADVANCE_RIP_AND_FINISH();
7655 } IEM_MC_ENDIF();
7656 IEM_MC_END();
7657 }
7658 else
7659 {
7660 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7661 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7663 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7664 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7665 } IEM_MC_ELSE() {
7666 IEM_MC_ADVANCE_RIP_AND_FINISH();
7667 } IEM_MC_ENDIF();
7668 IEM_MC_END();
7669 }
7670}
7671
7672
7673/**
7674 * @opcode 0x83
7675 * @opfltest cf
7676 */
7677FNIEMOP_DEF(iemOp_jnc_Jv)
7678{
7679 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7680 IEMOP_HLP_MIN_386();
7681 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7682 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7683 {
7684 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7685 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7687 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7688 IEM_MC_ADVANCE_RIP_AND_FINISH();
7689 } IEM_MC_ELSE() {
7690 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7691 } IEM_MC_ENDIF();
7692 IEM_MC_END();
7693 }
7694 else
7695 {
7696 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7697 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7700 IEM_MC_ADVANCE_RIP_AND_FINISH();
7701 } IEM_MC_ELSE() {
7702 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7703 } IEM_MC_ENDIF();
7704 IEM_MC_END();
7705 }
7706}
7707
7708
7709/**
7710 * @opcode 0x84
7711 * @opfltest zf
7712 */
7713FNIEMOP_DEF(iemOp_je_Jv)
7714{
7715 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7716 IEMOP_HLP_MIN_386();
7717 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7718 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7719 {
7720 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7721 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7723 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7724 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7725 } IEM_MC_ELSE() {
7726 IEM_MC_ADVANCE_RIP_AND_FINISH();
7727 } IEM_MC_ENDIF();
7728 IEM_MC_END();
7729 }
7730 else
7731 {
7732 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7733 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7736 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7737 } IEM_MC_ELSE() {
7738 IEM_MC_ADVANCE_RIP_AND_FINISH();
7739 } IEM_MC_ENDIF();
7740 IEM_MC_END();
7741 }
7742}
7743
7744
7745/**
7746 * @opcode 0x85
7747 * @opfltest zf
7748 */
7749FNIEMOP_DEF(iemOp_jne_Jv)
7750{
7751 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7752 IEMOP_HLP_MIN_386();
7753 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7754 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7755 {
7756 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7757 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7759 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7760 IEM_MC_ADVANCE_RIP_AND_FINISH();
7761 } IEM_MC_ELSE() {
7762 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7763 } IEM_MC_ENDIF();
7764 IEM_MC_END();
7765 }
7766 else
7767 {
7768 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7769 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7771 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7772 IEM_MC_ADVANCE_RIP_AND_FINISH();
7773 } IEM_MC_ELSE() {
7774 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7775 } IEM_MC_ENDIF();
7776 IEM_MC_END();
7777 }
7778}
7779
7780
7781/**
7782 * @opcode 0x86
7783 * @opfltest cf,zf
7784 */
7785FNIEMOP_DEF(iemOp_jbe_Jv)
7786{
7787 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7788 IEMOP_HLP_MIN_386();
7789 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7790 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7791 {
7792 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7793 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7795 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7796 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7797 } IEM_MC_ELSE() {
7798 IEM_MC_ADVANCE_RIP_AND_FINISH();
7799 } IEM_MC_ENDIF();
7800 IEM_MC_END();
7801 }
7802 else
7803 {
7804 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7805 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7807 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7808 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7809 } IEM_MC_ELSE() {
7810 IEM_MC_ADVANCE_RIP_AND_FINISH();
7811 } IEM_MC_ENDIF();
7812 IEM_MC_END();
7813 }
7814}
7815
7816
7817/**
7818 * @opcode 0x87
7819 * @opfltest cf,zf
7820 */
7821FNIEMOP_DEF(iemOp_jnbe_Jv)
7822{
7823 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7824 IEMOP_HLP_MIN_386();
7825 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7826 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7827 {
7828 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7829 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7831 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7832 IEM_MC_ADVANCE_RIP_AND_FINISH();
7833 } IEM_MC_ELSE() {
7834 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7835 } IEM_MC_ENDIF();
7836 IEM_MC_END();
7837 }
7838 else
7839 {
7840 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7841 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7843 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7844 IEM_MC_ADVANCE_RIP_AND_FINISH();
7845 } IEM_MC_ELSE() {
7846 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7847 } IEM_MC_ENDIF();
7848 IEM_MC_END();
7849 }
7850}
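
/*
 * Unsigned-compare crib for the pair above: after CMP, CF flags a borrow
 * and ZF equality, so (CF | ZF) != 0 means below-or-equal (jbe/jna) and
 * (CF | ZF) == 0 means strictly above (jnbe/ja). E.g. cmp eax, ebx with
 * eax=1, ebx=2 borrows, sets CF, and jbe is taken.
 */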
7851
7852
7853/**
7854 * @opcode 0x88
7855 * @opfltest sf
7856 */
7857FNIEMOP_DEF(iemOp_js_Jv)
7858{
7859 IEMOP_MNEMONIC(js_Jv, "js Jv");
7860 IEMOP_HLP_MIN_386();
7861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7862 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7863 {
7864 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7865 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7867 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7868 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7869 } IEM_MC_ELSE() {
7870 IEM_MC_ADVANCE_RIP_AND_FINISH();
7871 } IEM_MC_ENDIF();
7872 IEM_MC_END();
7873 }
7874 else
7875 {
7876 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7877 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7879 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7880 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7881 } IEM_MC_ELSE() {
7882 IEM_MC_ADVANCE_RIP_AND_FINISH();
7883 } IEM_MC_ENDIF();
7884 IEM_MC_END();
7885 }
7886}
7887
7888
7889/**
7890 * @opcode 0x89
7891 * @opfltest sf
7892 */
7893FNIEMOP_DEF(iemOp_jns_Jv)
7894{
7895 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7896 IEMOP_HLP_MIN_386();
7897 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7898 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7899 {
7900 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7901 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7903 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7904 IEM_MC_ADVANCE_RIP_AND_FINISH();
7905 } IEM_MC_ELSE() {
7906 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7907 } IEM_MC_ENDIF();
7908 IEM_MC_END();
7909 }
7910 else
7911 {
7912 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7913 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7915 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7916 IEM_MC_ADVANCE_RIP_AND_FINISH();
7917 } IEM_MC_ELSE() {
7918 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7919 } IEM_MC_ENDIF();
7920 IEM_MC_END();
7921 }
7922}
7923
7924
7925/**
7926 * @opcode 0x8a
7927 * @opfltest pf
7928 */
7929FNIEMOP_DEF(iemOp_jp_Jv)
7930{
7931 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7932 IEMOP_HLP_MIN_386();
7933 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7934 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7935 {
7936 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7937 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7940 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7941 } IEM_MC_ELSE() {
7942 IEM_MC_ADVANCE_RIP_AND_FINISH();
7943 } IEM_MC_ENDIF();
7944 IEM_MC_END();
7945 }
7946 else
7947 {
7948 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7949 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7951 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7952 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7953 } IEM_MC_ELSE() {
7954 IEM_MC_ADVANCE_RIP_AND_FINISH();
7955 } IEM_MC_ENDIF();
7956 IEM_MC_END();
7957 }
7958}
7959
7960
7961/**
7962 * @opcode 0x8b
7963 * @opfltest pf
7964 */
7965FNIEMOP_DEF(iemOp_jnp_Jv)
7966{
7967 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7968 IEMOP_HLP_MIN_386();
7969 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7970 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7971 {
7972 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7973 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7975 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7976 IEM_MC_ADVANCE_RIP_AND_FINISH();
7977 } IEM_MC_ELSE() {
7978 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7979 } IEM_MC_ENDIF();
7980 IEM_MC_END();
7981 }
7982 else
7983 {
7984 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7985 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7987 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7988 IEM_MC_ADVANCE_RIP_AND_FINISH();
7989 } IEM_MC_ELSE() {
7990 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7991 } IEM_MC_ENDIF();
7992 IEM_MC_END();
7993 }
7994}
7995
7996
7997/**
7998 * @opcode 0x8c
7999 * @opfltest sf,of
8000 */
8001FNIEMOP_DEF(iemOp_jl_Jv)
8002{
8003 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8004 IEMOP_HLP_MIN_386();
8005 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8006 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8007 {
8008 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8009 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8011 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8012 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8013 } IEM_MC_ELSE() {
8014 IEM_MC_ADVANCE_RIP_AND_FINISH();
8015 } IEM_MC_ENDIF();
8016 IEM_MC_END();
8017 }
8018 else
8019 {
8020 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8021 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8023 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8024 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8025 } IEM_MC_ELSE() {
8026 IEM_MC_ADVANCE_RIP_AND_FINISH();
8027 } IEM_MC_ENDIF();
8028 IEM_MC_END();
8029 }
8030}
8031
8032
8033/**
8034 * @opcode 0x8d
8035 * @opfltest sf,of
8036 */
8037FNIEMOP_DEF(iemOp_jnl_Jv)
8038{
8039 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8040 IEMOP_HLP_MIN_386();
8041 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8042 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8043 {
8044 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8045 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8047 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8048 IEM_MC_ADVANCE_RIP_AND_FINISH();
8049 } IEM_MC_ELSE() {
8050 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8051 } IEM_MC_ENDIF();
8052 IEM_MC_END();
8053 }
8054 else
8055 {
8056 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8057 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8059 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8060 IEM_MC_ADVANCE_RIP_AND_FINISH();
8061 } IEM_MC_ELSE() {
8062 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8063 } IEM_MC_ENDIF();
8064 IEM_MC_END();
8065 }
8066}
8067
8068
8069/**
8070 * @opcode 0x8e
8071 * @opfltest zf,sf,of
8072 */
8073FNIEMOP_DEF(iemOp_jle_Jv)
8074{
8075 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8076 IEMOP_HLP_MIN_386();
8077 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8078 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8079 {
8080 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8081 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8083 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8084 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8085 } IEM_MC_ELSE() {
8086 IEM_MC_ADVANCE_RIP_AND_FINISH();
8087 } IEM_MC_ENDIF();
8088 IEM_MC_END();
8089 }
8090 else
8091 {
8092 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8093 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8095 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8096 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8097 } IEM_MC_ELSE() {
8098 IEM_MC_ADVANCE_RIP_AND_FINISH();
8099 } IEM_MC_ENDIF();
8100 IEM_MC_END();
8101 }
8102}
8103
8104
8105/**
8106 * @opcode 0x8f
8107 * @opfltest zf,sf,of
8108 */
8109FNIEMOP_DEF(iemOp_jnle_Jv)
8110{
8111 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8112 IEMOP_HLP_MIN_386();
8113 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8114 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8115 {
8116 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8117 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8119 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8120 IEM_MC_ADVANCE_RIP_AND_FINISH();
8121 } IEM_MC_ELSE() {
8122 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8123 } IEM_MC_ENDIF();
8124 IEM_MC_END();
8125 }
8126 else
8127 {
8128 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8129 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8131 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8132 IEM_MC_ADVANCE_RIP_AND_FINISH();
8133 } IEM_MC_ELSE() {
8134 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8135 } IEM_MC_ENDIF();
8136 IEM_MC_END();
8137 }
8138}
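
/*
 * Signed-compare crib for the four handlers above (0F 8C..8F): after CMP,
 * SF != OF means the (signed) first operand was less (jl), and
 * ZF || (SF != OF) means less-or-equal (jle); jnl and jnle simply swap the
 * branch arms. Example: cmp eax, ebx with eax=-1, ebx=1 gives SF=1, OF=0,
 * so SF != OF and jl is taken, whereas the unsigned ja would also be taken
 * because -1 compares as 0xffffffff.
 */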
8139
8140
8141/**
8142 * @opcode 0x90
8143 * @opfltest of
8144 */
8145FNIEMOP_DEF(iemOp_seto_Eb)
8146{
8147 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8148 IEMOP_HLP_MIN_386();
8149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8150
8151 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8152 * any way. AMD says it's "unused", whatever that means. We're
8153 * ignoring for now. */
8154 if (IEM_IS_MODRM_REG_MODE(bRm))
8155 {
8156 /* register target */
8157 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8159 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8160 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8161 } IEM_MC_ELSE() {
8162 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8163 } IEM_MC_ENDIF();
8164 IEM_MC_ADVANCE_RIP_AND_FINISH();
8165 IEM_MC_END();
8166 }
8167 else
8168 {
8169 /* memory target */
8170 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8174 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8175 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8176 } IEM_MC_ELSE() {
8177 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8178 } IEM_MC_ENDIF();
8179 IEM_MC_ADVANCE_RIP_AND_FINISH();
8180 IEM_MC_END();
8181 }
8182}
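
/*
 * The fifteen SETcc handlers that follow repeat this shape: store the
 * constant 1 or 0 into a byte-sized register or memory destination based on
 * the same EFLAGS predicates the Jcc family uses, leaving the flags
 * themselves untouched, e.g.
 *
 *      sete    al              ; al := ZF ? 1 : 0
 *
 * ModRM.reg plays no part in operand selection; see the @todo above about
 * whether real CPUs decode it at all.
 */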
8183
8184
8185/**
8186 * @opcode 0x91
8187 * @opfltest of
8188 */
8189FNIEMOP_DEF(iemOp_setno_Eb)
8190{
8191 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8192 IEMOP_HLP_MIN_386();
8193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8194
8195 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8196 * any way. AMD says it's "unused", whatever that means. We're
8197 * ignoring for now. */
8198 if (IEM_IS_MODRM_REG_MODE(bRm))
8199 {
8200 /* register target */
8201 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8203 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8204 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8205 } IEM_MC_ELSE() {
8206 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8207 } IEM_MC_ENDIF();
8208 IEM_MC_ADVANCE_RIP_AND_FINISH();
8209 IEM_MC_END();
8210 }
8211 else
8212 {
8213 /* memory target */
8214 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8218 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8219 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8220 } IEM_MC_ELSE() {
8221 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8222 } IEM_MC_ENDIF();
8223 IEM_MC_ADVANCE_RIP_AND_FINISH();
8224 IEM_MC_END();
8225 }
8226}
8227
8228
8229/**
8230 * @opcode 0x92
8231 * @opfltest cf
8232 */
8233FNIEMOP_DEF(iemOp_setc_Eb)
8234{
8235 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8236 IEMOP_HLP_MIN_386();
8237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8238
8239 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8240 * any way. AMD says it's "unused", whatever that means. We're
8241 * ignoring for now. */
8242 if (IEM_IS_MODRM_REG_MODE(bRm))
8243 {
8244 /* register target */
8245 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8247 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8248 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8249 } IEM_MC_ELSE() {
8250 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8251 } IEM_MC_ENDIF();
8252 IEM_MC_ADVANCE_RIP_AND_FINISH();
8253 IEM_MC_END();
8254 }
8255 else
8256 {
8257 /* memory target */
8258 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8262 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8263 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8264 } IEM_MC_ELSE() {
8265 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8266 } IEM_MC_ENDIF();
8267 IEM_MC_ADVANCE_RIP_AND_FINISH();
8268 IEM_MC_END();
8269 }
8270}
8271
8272
8273/**
8274 * @opcode 0x93
8275 * @opfltest cf
8276 */
8277FNIEMOP_DEF(iemOp_setnc_Eb)
8278{
8279 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8280 IEMOP_HLP_MIN_386();
8281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8282
8283 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8284 * any way. AMD says it's "unused", whatever that means. We're
8285 * ignoring for now. */
8286 if (IEM_IS_MODRM_REG_MODE(bRm))
8287 {
8288 /* register target */
8289 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8291 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8292 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8293 } IEM_MC_ELSE() {
8294 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8295 } IEM_MC_ENDIF();
8296 IEM_MC_ADVANCE_RIP_AND_FINISH();
8297 IEM_MC_END();
8298 }
8299 else
8300 {
8301 /* memory target */
8302 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8306 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8307 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8308 } IEM_MC_ELSE() {
8309 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8310 } IEM_MC_ENDIF();
8311 IEM_MC_ADVANCE_RIP_AND_FINISH();
8312 IEM_MC_END();
8313 }
8314}
8315
8316
8317/**
8318 * @opcode 0x94
8319 * @opfltest zf
8320 */
8321FNIEMOP_DEF(iemOp_sete_Eb)
8322{
8323 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8324 IEMOP_HLP_MIN_386();
8325 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8326
8327 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8328 * any way. AMD says it's "unused", whatever that means. We're
8329 * ignoring for now. */
8330 if (IEM_IS_MODRM_REG_MODE(bRm))
8331 {
8332 /* register target */
8333 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8335 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8336 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8337 } IEM_MC_ELSE() {
8338 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8339 } IEM_MC_ENDIF();
8340 IEM_MC_ADVANCE_RIP_AND_FINISH();
8341 IEM_MC_END();
8342 }
8343 else
8344 {
8345 /* memory target */
8346 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8350 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8351 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8352 } IEM_MC_ELSE() {
8353 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8354 } IEM_MC_ENDIF();
8355 IEM_MC_ADVANCE_RIP_AND_FINISH();
8356 IEM_MC_END();
8357 }
8358}
8359
8360
8361/**
8362 * @opcode 0x95
8363 * @opfltest zf
8364 */
8365FNIEMOP_DEF(iemOp_setne_Eb)
8366{
8367 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8368 IEMOP_HLP_MIN_386();
8369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8370
8371 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8372 * any way. AMD says it's "unused", whatever that means. We're
8373 * ignoring for now. */
8374 if (IEM_IS_MODRM_REG_MODE(bRm))
8375 {
8376 /* register target */
8377 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8379 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8380 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8381 } IEM_MC_ELSE() {
8382 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8383 } IEM_MC_ENDIF();
8384 IEM_MC_ADVANCE_RIP_AND_FINISH();
8385 IEM_MC_END();
8386 }
8387 else
8388 {
8389 /* memory target */
8390 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8394 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8395 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8396 } IEM_MC_ELSE() {
8397 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8398 } IEM_MC_ENDIF();
8399 IEM_MC_ADVANCE_RIP_AND_FINISH();
8400 IEM_MC_END();
8401 }
8402}
8403
8404
8405/**
8406 * @opcode 0x96
8407 * @opfltest cf,zf
8408 */
8409FNIEMOP_DEF(iemOp_setbe_Eb)
8410{
8411 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8412 IEMOP_HLP_MIN_386();
8413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8414
8415 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8416 * any way. AMD says it's "unused", whatever that means. We're
8417 * ignoring for now. */
8418 if (IEM_IS_MODRM_REG_MODE(bRm))
8419 {
8420 /* register target */
8421 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8423 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8424 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8425 } IEM_MC_ELSE() {
8426 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8427 } IEM_MC_ENDIF();
8428 IEM_MC_ADVANCE_RIP_AND_FINISH();
8429 IEM_MC_END();
8430 }
8431 else
8432 {
8433 /* memory target */
8434 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8438 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8439 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8440 } IEM_MC_ELSE() {
8441 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8442 } IEM_MC_ENDIF();
8443 IEM_MC_ADVANCE_RIP_AND_FINISH();
8444 IEM_MC_END();
8445 }
8446}
8447
8448
8449/**
8450 * @opcode 0x97
8451 * @opfltest cf,zf
8452 */
8453FNIEMOP_DEF(iemOp_setnbe_Eb)
8454{
8455 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8456 IEMOP_HLP_MIN_386();
8457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8458
8459 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8460 * any way. AMD says it's "unused", whatever that means. We're
8461 * ignoring for now. */
8462 if (IEM_IS_MODRM_REG_MODE(bRm))
8463 {
8464 /* register target */
8465 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8467 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8468 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8469 } IEM_MC_ELSE() {
8470 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8471 } IEM_MC_ENDIF();
8472 IEM_MC_ADVANCE_RIP_AND_FINISH();
8473 IEM_MC_END();
8474 }
8475 else
8476 {
8477 /* memory target */
8478 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8482 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8483 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8484 } IEM_MC_ELSE() {
8485 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8486 } IEM_MC_ENDIF();
8487 IEM_MC_ADVANCE_RIP_AND_FINISH();
8488 IEM_MC_END();
8489 }
8490}
8491
8492
8493/**
8494 * @opcode 0x98
8495 * @opfltest sf
8496 */
8497FNIEMOP_DEF(iemOp_sets_Eb)
8498{
8499 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8500 IEMOP_HLP_MIN_386();
8501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8502
8503 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8504 * any way. AMD says it's "unused", whatever that means. We're
8505 * ignoring for now. */
8506 if (IEM_IS_MODRM_REG_MODE(bRm))
8507 {
8508 /* register target */
8509 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8511 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8512 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8513 } IEM_MC_ELSE() {
8514 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8515 } IEM_MC_ENDIF();
8516 IEM_MC_ADVANCE_RIP_AND_FINISH();
8517 IEM_MC_END();
8518 }
8519 else
8520 {
8521 /* memory target */
8522 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8527 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8528 } IEM_MC_ELSE() {
8529 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8530 } IEM_MC_ENDIF();
8531 IEM_MC_ADVANCE_RIP_AND_FINISH();
8532 IEM_MC_END();
8533 }
8534}
8535
8536
8537/**
8538 * @opcode 0x99
8539 * @opfltest sf
8540 */
8541FNIEMOP_DEF(iemOp_setns_Eb)
8542{
8543 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8544 IEMOP_HLP_MIN_386();
8545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8546
8547 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8548 * any way. AMD says it's "unused", whatever that means. We're
8549 * ignoring for now. */
8550 if (IEM_IS_MODRM_REG_MODE(bRm))
8551 {
8552 /* register target */
8553 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8555 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8556 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8557 } IEM_MC_ELSE() {
8558 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8559 } IEM_MC_ENDIF();
8560 IEM_MC_ADVANCE_RIP_AND_FINISH();
8561 IEM_MC_END();
8562 }
8563 else
8564 {
8565 /* memory target */
8566 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8570 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8571 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8572 } IEM_MC_ELSE() {
8573 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8574 } IEM_MC_ENDIF();
8575 IEM_MC_ADVANCE_RIP_AND_FINISH();
8576 IEM_MC_END();
8577 }
8578}
8579
8580
8581/**
8582 * @opcode 0x9a
8583 * @opfltest pf
8584 */
8585FNIEMOP_DEF(iemOp_setp_Eb)
8586{
8587 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8588 IEMOP_HLP_MIN_386();
8589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8590
8591 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8592 * any way. AMD says it's "unused", whatever that means. We're
8593 * ignoring for now. */
8594 if (IEM_IS_MODRM_REG_MODE(bRm))
8595 {
8596 /* register target */
8597 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8599 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8600 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8601 } IEM_MC_ELSE() {
8602 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8603 } IEM_MC_ENDIF();
8604 IEM_MC_ADVANCE_RIP_AND_FINISH();
8605 IEM_MC_END();
8606 }
8607 else
8608 {
8609 /* memory target */
8610 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8615 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8616 } IEM_MC_ELSE() {
8617 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8618 } IEM_MC_ENDIF();
8619 IEM_MC_ADVANCE_RIP_AND_FINISH();
8620 IEM_MC_END();
8621 }
8622}
8623
8624
8625/**
8626 * @opcode 0x9b
8627 * @opfltest pf
8628 */
8629FNIEMOP_DEF(iemOp_setnp_Eb)
8630{
8631 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8632 IEMOP_HLP_MIN_386();
8633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8634
8635 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8636 * any way. AMD says it's "unused", whatever that means. We're
8637 * ignoring for now. */
8638 if (IEM_IS_MODRM_REG_MODE(bRm))
8639 {
8640 /* register target */
8641 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8643 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8644 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8645 } IEM_MC_ELSE() {
8646 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8647 } IEM_MC_ENDIF();
8648 IEM_MC_ADVANCE_RIP_AND_FINISH();
8649 IEM_MC_END();
8650 }
8651 else
8652 {
8653 /* memory target */
8654 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8659 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8660 } IEM_MC_ELSE() {
8661 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8662 } IEM_MC_ENDIF();
8663 IEM_MC_ADVANCE_RIP_AND_FINISH();
8664 IEM_MC_END();
8665 }
8666}
8667
8668
8669/**
8670 * @opcode 0x9c
8671 * @opfltest sf,of
8672 */
8673FNIEMOP_DEF(iemOp_setl_Eb)
8674{
8675 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8676 IEMOP_HLP_MIN_386();
8677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8678
8679 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8680 * any way. AMD says it's "unused", whatever that means. We're
8681 * ignoring for now. */
8682 if (IEM_IS_MODRM_REG_MODE(bRm))
8683 {
8684 /* register target */
8685 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8687 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8688 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8689 } IEM_MC_ELSE() {
8690 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8691 } IEM_MC_ENDIF();
8692 IEM_MC_ADVANCE_RIP_AND_FINISH();
8693 IEM_MC_END();
8694 }
8695 else
8696 {
8697 /* memory target */
8698 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8702 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8703 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8704 } IEM_MC_ELSE() {
8705 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8706 } IEM_MC_ENDIF();
8707 IEM_MC_ADVANCE_RIP_AND_FINISH();
8708 IEM_MC_END();
8709 }
8710}
8711
8712
8713/**
8714 * @opcode 0x9d
8715 * @opfltest sf,of
8716 */
8717FNIEMOP_DEF(iemOp_setnl_Eb)
8718{
8719 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8720 IEMOP_HLP_MIN_386();
8721 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8722
8723 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8724 * any way. AMD says it's "unused", whatever that means. We're
8725 * ignoring for now. */
8726 if (IEM_IS_MODRM_REG_MODE(bRm))
8727 {
8728 /* register target */
8729 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8731 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8732 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8733 } IEM_MC_ELSE() {
8734 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8735 } IEM_MC_ENDIF();
8736 IEM_MC_ADVANCE_RIP_AND_FINISH();
8737 IEM_MC_END();
8738 }
8739 else
8740 {
8741 /* memory target */
8742 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8746 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8747 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8748 } IEM_MC_ELSE() {
8749 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8750 } IEM_MC_ENDIF();
8751 IEM_MC_ADVANCE_RIP_AND_FINISH();
8752 IEM_MC_END();
8753 }
8754}
8755
8756
8757/**
8758 * @opcode 0x9e
8759 * @opfltest zf,sf,of
8760 */
8761FNIEMOP_DEF(iemOp_setle_Eb)
8762{
8763 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8764 IEMOP_HLP_MIN_386();
8765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8766
8767 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8768 * any way. AMD says it's "unused", whatever that means. We're
8769 * ignoring for now. */
8770 if (IEM_IS_MODRM_REG_MODE(bRm))
8771 {
8772 /* register target */
8773 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8775 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8776 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8777 } IEM_MC_ELSE() {
8778 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8779 } IEM_MC_ENDIF();
8780 IEM_MC_ADVANCE_RIP_AND_FINISH();
8781 IEM_MC_END();
8782 }
8783 else
8784 {
8785 /* memory target */
8786 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8790 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8791 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8792 } IEM_MC_ELSE() {
8793 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8794 } IEM_MC_ENDIF();
8795 IEM_MC_ADVANCE_RIP_AND_FINISH();
8796 IEM_MC_END();
8797 }
8798}
8799
8800
8801/**
8802 * @opcode 0x9f
8803 * @opfltest zf,sf,of
8804 */
8805FNIEMOP_DEF(iemOp_setnle_Eb)
8806{
8807 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8808 IEMOP_HLP_MIN_386();
8809 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8810
8811 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8812 * any way. AMD says it's "unused", whatever that means. We're
8813 * ignoring for now. */
8814 if (IEM_IS_MODRM_REG_MODE(bRm))
8815 {
8816 /* register target */
8817 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8819 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8820 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8821 } IEM_MC_ELSE() {
8822 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8823 } IEM_MC_ENDIF();
8824 IEM_MC_ADVANCE_RIP_AND_FINISH();
8825 IEM_MC_END();
8826 }
8827 else
8828 {
8829 /* memory target */
8830 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8834 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8835 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8836 } IEM_MC_ELSE() {
8837 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8838 } IEM_MC_ENDIF();
8839 IEM_MC_ADVANCE_RIP_AND_FINISH();
8840 IEM_MC_END();
8841 }
8842}
8843
8844
8845/** Opcode 0x0f 0xa0. */
8846FNIEMOP_DEF(iemOp_push_fs)
8847{
8848 IEMOP_MNEMONIC(push_fs, "push fs");
8849 IEMOP_HLP_MIN_386();
8850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8851 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8852}
8853
8854
8855/** Opcode 0x0f 0xa1. */
8856FNIEMOP_DEF(iemOp_pop_fs)
8857{
8858 IEMOP_MNEMONIC(pop_fs, "pop fs");
8859 IEMOP_HLP_MIN_386();
8860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8862 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8863 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8864 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8865 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8866 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8867 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8868 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8869}
8870
8871
8872/** Opcode 0x0f 0xa2. */
8873FNIEMOP_DEF(iemOp_cpuid)
8874{
8875 IEMOP_MNEMONIC(cpuid, "cpuid");
8876 IEMOP_HLP_MIN_486(); /* not all 486es. */
8877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8878 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8879 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8880 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8881 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8882 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8883 iemCImpl_cpuid);
8884}
8885
8886
8887/**
8888 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8889 * iemOp_bts_Ev_Gv.
8890 */
8891
8892#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8894 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8895 \
8896 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8897 { \
8898 /* register destination. */ \
8899 switch (pVCpu->iem.s.enmEffOpSize) \
8900 { \
8901 case IEMMODE_16BIT: \
8902 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8904 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8905 IEM_MC_ARG(uint16_t, u16Src, 1); \
8906 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8907 \
8908 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8909 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8910 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8911 IEM_MC_REF_EFLAGS(pEFlags); \
8912 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8913 \
8914 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8915 IEM_MC_END(); \
8916 break; \
8917 \
8918 case IEMMODE_32BIT: \
8919 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8921 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8922 IEM_MC_ARG(uint32_t, u32Src, 1); \
8923 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8924 \
8925 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8926 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8927 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8928 IEM_MC_REF_EFLAGS(pEFlags); \
8929 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
8930 \
8931 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8932 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8933 IEM_MC_END(); \
8934 break; \
8935 \
8936 case IEMMODE_64BIT: \
8937 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8939 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8940 IEM_MC_ARG(uint64_t, u64Src, 1); \
8941 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8942 \
8943 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8944 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8945 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8946 IEM_MC_REF_EFLAGS(pEFlags); \
8947 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
8948 \
8949 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8950 IEM_MC_END(); \
8951 break; \
8952 \
8953 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8954 } \
8955 } \
8956 else \
8957 { \
8958 /* memory destination. */ \
8959 /** @todo test negative bit offsets! */ \
8960 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8961 { \
8962 switch (pVCpu->iem.s.enmEffOpSize) \
8963 { \
8964 case IEMMODE_16BIT: \
8965 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8968 IEMOP_HLP_DONE_DECODING(); \
8969 \
8970 IEM_MC_ARG(uint16_t, u16Src, 1); \
8971 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8972 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8973 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8974 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8975 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8976 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8977 \
8978 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8979 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8980 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8981 \
8982 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8983 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8984 \
8985 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8986 IEM_MC_COMMIT_EFLAGS(EFlags); \
8987 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8988 IEM_MC_END(); \
8989 break; \
8990 \
8991 case IEMMODE_32BIT: \
8992 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8995 IEMOP_HLP_DONE_DECODING(); \
8996 \
8997 IEM_MC_ARG(uint32_t, u32Src, 1); \
8998 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8999 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9000 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9001 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9002 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9003 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9004 \
9005 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9006 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9007 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9008 \
9009 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9010 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9011 \
9012 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9013 IEM_MC_COMMIT_EFLAGS(EFlags); \
9014 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9015 IEM_MC_END(); \
9016 break; \
9017 \
9018 case IEMMODE_64BIT: \
9019 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9022 IEMOP_HLP_DONE_DECODING(); \
9023 \
9024 IEM_MC_ARG(uint64_t, u64Src, 1); \
9025 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9026 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9027 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9028 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9029 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9030 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9031 \
9032 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9033 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9034 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9035 \
9036 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9037 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9038 \
9039 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9040 IEM_MC_COMMIT_EFLAGS(EFlags); \
9041 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9042 IEM_MC_END(); \
9043 break; \
9044 \
9045 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9046 } \
9047 } \
9048 else \
9049 { \
9050 (void)0
9051/* Separate macro to work around parsing issue in IEMAllInstPython.py */
9052#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9053 switch (pVCpu->iem.s.enmEffOpSize) \
9054 { \
9055 case IEMMODE_16BIT: \
9056 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9059 IEMOP_HLP_DONE_DECODING(); \
9060 \
9061 IEM_MC_ARG(uint16_t, u16Src, 1); \
9062 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9063 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9064 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9065 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9066 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9067 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9068 \
9069 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9070 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9071 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9072 \
9073 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9074 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9075 \
9076 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9077 IEM_MC_COMMIT_EFLAGS(EFlags); \
9078 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9079 IEM_MC_END(); \
9080 break; \
9081 \
9082 case IEMMODE_32BIT: \
9083 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9086 IEMOP_HLP_DONE_DECODING(); \
9087 \
9088 IEM_MC_ARG(uint32_t, u32Src, 1); \
9089 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9090 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9091 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9092 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9093 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9094 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9095 \
9096 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9097 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9098 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9099 \
9100 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9101 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9102 \
9103 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9104 IEM_MC_COMMIT_EFLAGS(EFlags); \
9105 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9106 IEM_MC_END(); \
9107 break; \
9108 \
9109 case IEMMODE_64BIT: \
9110 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9111 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9113 IEMOP_HLP_DONE_DECODING(); \
9114 \
9115 IEM_MC_ARG(uint64_t, u64Src, 1); \
9116 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9117 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9118 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9119 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9120 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9121 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9122 \
9123 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9124 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9125 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9126 \
9127 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9128 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9129 \
9130 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9131 IEM_MC_COMMIT_EFLAGS(EFlags); \
9132 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9133 IEM_MC_END(); \
9134 break; \
9135 \
9136 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9137 } \
9138 } \
9139 } \
9140 (void)0
9141
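/*
 * Illustrative note on the bit-offset arithmetic in the macros above: for the
 * memory forms of BT/BTS/BTR/BTC the register operand is a *signed* bit
 * offset, so the effective address is first adjusted by whole operand-sized
 * units (the SAR + SHL + ADD sequence) and only the low bits select the bit
 * within that unit (the AND).  A minimal standalone sketch of the 16-bit
 * case, assuming a flat address calculation; the helper name is made up and
 * not part of the decoder:
 */
static RTGCPTR iemExampleBitEffAddrU16(RTGCPTR GCPtrEff, int16_t i16BitNo, unsigned *piBitInUnit)
{
    *piBitInUnit = (uint16_t)i16BitNo & 0x0f;             /* bit within the 16-bit unit (the AND 0x0f). */
    int16_t const i16AddrAdj = (int16_t)(i16BitNo >> 4);  /* signed unit index (the SAR by 4). */
    return GCPtrEff + (RTGCPTR)(int64_t)i16AddrAdj * 2;   /* scale units to bytes (the SHL by 1). */
}
/* E.g. 'bt word [mem], ax' with AX=0xffff (-1) tests bit 15 of the word at mem-2. */
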
9142/* Read-only version (bt). */
9143#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9145 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9146 \
9147 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9148 { \
9149 /* register destination. */ \
9150 switch (pVCpu->iem.s.enmEffOpSize) \
9151 { \
9152 case IEMMODE_16BIT: \
9153 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9155 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9156 IEM_MC_ARG(uint16_t, u16Src, 1); \
9157 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9158 \
9159 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9160 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9161 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9162 IEM_MC_REF_EFLAGS(pEFlags); \
9163 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9164 \
9165 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9166 IEM_MC_END(); \
9167 break; \
9168 \
9169 case IEMMODE_32BIT: \
9170 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9172 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9173 IEM_MC_ARG(uint32_t, u32Src, 1); \
9174 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9175 \
9176 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9177 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9178 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9179 IEM_MC_REF_EFLAGS(pEFlags); \
9180 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9181 \
9182 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9183 IEM_MC_END(); \
9184 break; \
9185 \
9186 case IEMMODE_64BIT: \
9187 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9189 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9190 IEM_MC_ARG(uint64_t, u64Src, 1); \
9191 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9192 \
9193 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9194 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9195 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9196 IEM_MC_REF_EFLAGS(pEFlags); \
9197 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9198 \
9199 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9200 IEM_MC_END(); \
9201 break; \
9202 \
9203 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9204 } \
9205 } \
9206 else \
9207 { \
9208 /* memory destination. */ \
9209 /** @todo test negative bit offsets! */ \
9210 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9211 { \
9212 switch (pVCpu->iem.s.enmEffOpSize) \
9213 { \
9214 case IEMMODE_16BIT: \
9215 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9218 IEMOP_HLP_DONE_DECODING(); \
9219 \
9220 IEM_MC_ARG(uint16_t, u16Src, 1); \
9221 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9222 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9223 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9224 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9225 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9226 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9227 \
9228 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9229 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9230 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9231 \
9232 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9233 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9234 \
9235 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9236 IEM_MC_COMMIT_EFLAGS(EFlags); \
9237 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9238 IEM_MC_END(); \
9239 break; \
9240 \
9241 case IEMMODE_32BIT: \
9242 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9245 IEMOP_HLP_DONE_DECODING(); \
9246 \
9247 IEM_MC_ARG(uint32_t, u32Src, 1); \
9248 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9249 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9250 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9251 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9252 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9253 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9254 \
9255 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9256 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9257 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9258 \
9259 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9260 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9261 \
9262 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9263 IEM_MC_COMMIT_EFLAGS(EFlags); \
9264 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9265 IEM_MC_END(); \
9266 break; \
9267 \
9268 case IEMMODE_64BIT: \
9269 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9272 IEMOP_HLP_DONE_DECODING(); \
9273 \
9274 IEM_MC_ARG(uint64_t, u64Src, 1); \
9275 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9276 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9277 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9278 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9279 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9280 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9281 \
9282 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9283 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9284 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9285 \
9286 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9287 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9288 \
9289 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9290 IEM_MC_COMMIT_EFLAGS(EFlags); \
9291 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9292 IEM_MC_END(); \
9293 break; \
9294 \
9295 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9296 } \
9297 } \
9298 else \
9299 { \
9300 IEMOP_HLP_DONE_DECODING(); \
9301 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9302 } \
9303 } \
9304 (void)0
9305
9306
9307/**
9308 * @opcode 0xa3
9309 * @oppfx n/a
9310 * @opflclass bitmap
9311 */
9312FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9313{
9314 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9315 IEMOP_HLP_MIN_386();
9316 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9317}
9318
9319
9320/**
9321 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9322 */
9323#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_Ib(a_pImplExpr) \
9324 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9325 \
9326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9327 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9328 \
9329 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9330 { \
9331 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9332 \
9333 switch (pVCpu->iem.s.enmEffOpSize) \
9334 { \
9335 case IEMMODE_16BIT: \
9336 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9338 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9339 IEM_MC_ARG(uint16_t, u16Src, 1); \
9340 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9341 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9342 \
9343 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9344 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9345 IEM_MC_REF_EFLAGS(pEFlags); \
9346 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9347 \
9348 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9349 IEM_MC_END(); \
9350 break; \
9351 \
9352 case IEMMODE_32BIT: \
9353 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9355 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9356 IEM_MC_ARG(uint32_t, u32Src, 1); \
9357 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9358 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9359 \
9360 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9361 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9362 IEM_MC_REF_EFLAGS(pEFlags); \
9363 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9364 \
9365 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9366 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9367 IEM_MC_END(); \
9368 break; \
9369 \
9370 case IEMMODE_64BIT: \
9371 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9373 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9374 IEM_MC_ARG(uint64_t, u64Src, 1); \
9375 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9376 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9377 \
9378 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9379 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9380 IEM_MC_REF_EFLAGS(pEFlags); \
9381 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9382 \
9383 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9384 IEM_MC_END(); \
9385 break; \
9386 \
9387 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9388 } \
9389 } \
9390 else \
9391 { \
9392 switch (pVCpu->iem.s.enmEffOpSize) \
9393 { \
9394 case IEMMODE_16BIT: \
9395 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9398 \
9399 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9401 \
9402 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9403 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9404 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9405 \
9406 IEM_MC_ARG(uint16_t, u16Src, 1); \
9407 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9408 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9409 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9410 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9411 \
9412 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9413 IEM_MC_COMMIT_EFLAGS(EFlags); \
9414 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9415 IEM_MC_END(); \
9416 break; \
9417 \
9418 case IEMMODE_32BIT: \
9419 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9422 \
9423 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9425 \
9426 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9427 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9428 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9429 \
9430 IEM_MC_ARG(uint32_t, u32Src, 1); \
9431 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9432 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9433 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9434 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9435 \
9436 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9437 IEM_MC_COMMIT_EFLAGS(EFlags); \
9438 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9439 IEM_MC_END(); \
9440 break; \
9441 \
9442 case IEMMODE_64BIT: \
9443 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9446 \
9447 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9449 \
9450 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9451 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9452 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9453 \
9454 IEM_MC_ARG(uint64_t, u64Src, 1); \
9455 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9456 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9457 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9458 \
9459 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9460 \
9461 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9462 IEM_MC_COMMIT_EFLAGS(EFlags); \
9463 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9464 IEM_MC_END(); \
9465 break; \
9466 \
9467 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9468 } \
9469 } (void)0
9470
9471
9472/**
9473 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9474 */
9475#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9476 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9477 \
9478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9479 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9480 \
9481 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9482 { \
9483 switch (pVCpu->iem.s.enmEffOpSize) \
9484 { \
9485 case IEMMODE_16BIT: \
9486 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9488 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9489 IEM_MC_ARG(uint16_t, u16Src, 1); \
9490 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9491 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9492 \
9493 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9494 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9495 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9496 IEM_MC_REF_EFLAGS(pEFlags); \
9497 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9498 \
9499 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9500 IEM_MC_END(); \
9501 break; \
9502 \
9503 case IEMMODE_32BIT: \
9504 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9506 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9507 IEM_MC_ARG(uint32_t, u32Src, 1); \
9508 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9509 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9510 \
9511 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9512 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9513 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9514 IEM_MC_REF_EFLAGS(pEFlags); \
9515 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9516 \
9517 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9518 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9519 IEM_MC_END(); \
9520 break; \
9521 \
9522 case IEMMODE_64BIT: \
9523 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9525 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9526 IEM_MC_ARG(uint64_t, u64Src, 1); \
9527 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9528 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9529 \
9530 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9531 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9532 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9533 IEM_MC_REF_EFLAGS(pEFlags); \
9534 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9535 \
9536 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9537 IEM_MC_END(); \
9538 break; \
9539 \
9540 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9541 } \
9542 } \
9543 else \
9544 { \
9545 switch (pVCpu->iem.s.enmEffOpSize) \
9546 { \
9547 case IEMMODE_16BIT: \
9548 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9549 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9550 IEM_MC_ARG(uint16_t, u16Src, 1); \
9551 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9553 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9554 \
9555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9557 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9558 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9559 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9560 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9561 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9562 \
9563 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9564 IEM_MC_COMMIT_EFLAGS(EFlags); \
9565 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9566 IEM_MC_END(); \
9567 break; \
9568 \
9569 case IEMMODE_32BIT: \
9570 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9571 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9572 IEM_MC_ARG(uint32_t, u32Src, 1); \
9573 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9575 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9576 \
9577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9579 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9580 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9581 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9582 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9583 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9584 \
9585 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9586 IEM_MC_COMMIT_EFLAGS(EFlags); \
9587 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9588 IEM_MC_END(); \
9589 break; \
9590 \
9591 case IEMMODE_64BIT: \
9592 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9593 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9594 IEM_MC_ARG(uint64_t, u64Src, 1); \
9595 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9597 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9598 \
9599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9601 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9602 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9603 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9604 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9605 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9606 \
9607 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9608 IEM_MC_COMMIT_EFLAGS(EFlags); \
9609 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9610 IEM_MC_END(); \
9611 break; \
9612 \
9613 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9614 } \
9615 } (void)0
9616
9617
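/*
 * For orientation, a minimal sketch of the 32-bit SHLD semantics that the
 * pfnNormalU32 workers invoked by the macros above implement (the real
 * workers also compute EFLAGS, which is why they take pEFlags); SHRD is the
 * mirror image with the shift directions swapped.  Helper name is made up:
 */
static uint32_t iemExampleShldU32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;               /* the CPU masks the count modulo 32 */
    if (!cShift)
        return uDst;            /* count 0: destination unchanged, no flag updates */
    return (uDst << cShift) | (uSrc >> (32 - cShift)); /* vacated low bits filled from the source */
}
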
9618/**
9619 * @opcode 0xa4
9620 * @opflclass shift_count
9621 */
9622FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9623{
9624 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9625 IEMOP_HLP_MIN_386();
9626    IEMOP_BODY_SHLD_SHRD_Ev_Gv_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9627}
9628
9629
9630/**
9631 * @opcode 0xa5
9632 * @opflclass shift_count
9633 */
9634FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9635{
9636 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9637 IEMOP_HLP_MIN_386();
9638 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9639}
9640
9641
9642/** Opcode 0x0f 0xa8. */
9643FNIEMOP_DEF(iemOp_push_gs)
9644{
9645 IEMOP_MNEMONIC(push_gs, "push gs");
9646 IEMOP_HLP_MIN_386();
9647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9648 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9649}
9650
9651
9652/** Opcode 0x0f 0xa9. */
9653FNIEMOP_DEF(iemOp_pop_gs)
9654{
9655 IEMOP_MNEMONIC(pop_gs, "pop gs");
9656 IEMOP_HLP_MIN_386();
9657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9658 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9659 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9660 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9661 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9662 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9663 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9664 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9665 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9666}
9667
9668
9669/** Opcode 0x0f 0xaa. */
9670FNIEMOP_DEF(iemOp_rsm)
9671{
9672 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9673 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9675 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9676 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9677 iemCImpl_rsm);
9678}
9679
9680
9681
9682/**
9683 * @opcode 0xab
9684 * @oppfx n/a
9685 * @opflclass bitmap
9686 */
9687FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9688{
9689 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9690 IEMOP_HLP_MIN_386();
9691 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9692 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9693}
9694
9695
9696/**
9697 * @opcode 0xac
9698 * @opflclass shift_count
9699 */
9700FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9701{
9702 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9703 IEMOP_HLP_MIN_386();
9704    IEMOP_BODY_SHLD_SHRD_Ev_Gv_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9705}
9706
9707
9708/**
9709 * @opcode 0xad
9710 * @opflclass shift_count
9711 */
9712FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9713{
9714 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9715 IEMOP_HLP_MIN_386();
9716 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9717}
9718
9719
9720/** Opcode 0x0f 0xae mem/0. */
9721FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9722{
9723 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9724 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9725 IEMOP_RAISE_INVALID_OPCODE_RET();
9726
9727 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9728 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9731 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9732 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9733 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9734 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9735 IEM_MC_END();
9736}
9737
9738
9739/** Opcode 0x0f 0xae mem/1. */
9740FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9741{
9742 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9743 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9744 IEMOP_RAISE_INVALID_OPCODE_RET();
9745
9746 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9747 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9750 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9751 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9752 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9753 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9754 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9755 IEM_MC_END();
9756}
9757
9758
9759/**
9760 * @opmaps grp15
9761 * @opcode !11/2
9762 * @oppfx none
9763 * @opcpuid sse
9764 * @opgroup og_sse_mxcsrsm
9765 * @opxcpttype 5
9766 * @optest op1=0 -> mxcsr=0
9767 * @optest op1=0x2083 -> mxcsr=0x2083
9768 * @optest op1=0xfffffffe -> value.xcpt=0xd
9769 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9770 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9771 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9772 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9773 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9774 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9775 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9776 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9777 */
9778FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9779{
9780 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9781 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9782 IEMOP_RAISE_INVALID_OPCODE_RET();
9783
9784 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9785 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9788 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9789 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9790 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9791 IEM_MC_END();
9792}
9793
9794
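/*
 * Side note: LDMXCSR raises #GP(0) when reserved MXCSR bits are set (see the
 * value.xcpt=0xd test above).  A minimal sketch of that validity check,
 * assuming the caller passes the CPU's MXCSR_MASK as reported by FXSAVE;
 * helper name is made up:
 */
static bool iemExampleIsValidMxCsr(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
{
    return !(uNewMxCsr & ~fMxCsrMask); /* any bit outside the supported mask => #GP(0) */
}
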
9795/**
9796 * @opmaps grp15
9797 * @opcode !11/3
9798 * @oppfx none
9799 * @opcpuid sse
9800 * @opgroup og_sse_mxcsrsm
9801 * @opxcpttype 5
9802 * @optest mxcsr=0 -> op1=0
9803 * @optest mxcsr=0x2083 -> op1=0x2083
9804 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9805 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9806 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9807 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9808 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9809 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9810 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9811 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9812 */
9813FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9814{
9815 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9816 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9817 IEMOP_RAISE_INVALID_OPCODE_RET();
9818
9819 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9820 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9823 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9824 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9825 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9826 IEM_MC_END();
9827}
9828
9829
9830/**
9831 * @opmaps grp15
9832 * @opcode !11/4
9833 * @oppfx none
9834 * @opcpuid xsave
9835 * @opgroup og_system
9836 * @opxcpttype none
9837 */
9838FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9839{
9840 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9841 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9842 IEMOP_RAISE_INVALID_OPCODE_RET();
9843
9844 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9845 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9846 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9848 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9849 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9850 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9851 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9852 IEM_MC_END();
9853}
9854
9855
9856/**
9857 * @opmaps grp15
9858 * @opcode !11/5
9859 * @oppfx none
9860 * @opcpuid xsave
9861 * @opgroup og_system
9862 * @opxcpttype none
9863 */
9864FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9865{
9866 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9867 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9868 IEMOP_RAISE_INVALID_OPCODE_RET();
9869
9870 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9871 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9874    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, like fxrstor above */
9875 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9876 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9877 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9878 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9879 IEM_MC_END();
9880}
9881
9882/** Opcode 0x0f 0xae mem/6. */
9883FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9884
9885/**
9886 * @opmaps grp15
9887 * @opcode !11/7
9888 * @oppfx none
9889 * @opcpuid clfsh
9890 * @opgroup og_cachectl
9891 * @optest op1=1 ->
9892 */
9893FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9894{
9895 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9896 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9897 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9898
9899 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9900 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9903 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9904 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9905 IEM_MC_END();
9906}
9907
9908/**
9909 * @opmaps grp15
9910 * @opcode !11/7
9911 * @oppfx 0x66
9912 * @opcpuid clflushopt
9913 * @opgroup og_cachectl
9914 * @optest op1=1 ->
9915 */
9916FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9917{
9918 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9919 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9920 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9921
9922 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9923 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9926 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9927 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9928 IEM_MC_END();
9929}
9930
9931
9932/** Opcode 0x0f 0xae 11b/5. */
9933FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9934{
9935 RT_NOREF_PV(bRm);
9936 IEMOP_MNEMONIC(lfence, "lfence");
9937 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9939#ifdef RT_ARCH_ARM64
9940 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9941#else
9942 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9943 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9944 else
9945 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9946#endif
9947 IEM_MC_ADVANCE_RIP_AND_FINISH();
9948 IEM_MC_END();
9949}
9950
9951
9952/** Opcode 0x0f 0xae 11b/6. */
9953FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9954{
9955 RT_NOREF_PV(bRm);
9956 IEMOP_MNEMONIC(mfence, "mfence");
9957 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9959#ifdef RT_ARCH_ARM64
9960 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9961#else
9962 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9963 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9964 else
9965 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9966#endif
9967 IEM_MC_ADVANCE_RIP_AND_FINISH();
9968 IEM_MC_END();
9969}
9970
9971
9972/** Opcode 0x0f 0xae 11b/7. */
9973FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9974{
9975 RT_NOREF_PV(bRm);
9976 IEMOP_MNEMONIC(sfence, "sfence");
9977 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9979#ifdef RT_ARCH_ARM64
9980 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9981#else
9982 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9983 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9984 else
9985 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9986#endif
9987 IEM_MC_ADVANCE_RIP_AND_FINISH();
9988 IEM_MC_END();
9989}
9990
9991
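/*
 * The non-ARM64 branches above pick iemAImpl_alt_mem_fence when the host
 * lacks SSE2 and thus has no LFENCE/MFENCE/SFENCE.  The underlying idea is
 * that any full memory barrier is at least as strong as the guest fence; a
 * sketch using the portable GCC/Clang builtin instead of the real assembly
 * worker (illustrative only):
 */
static void iemExampleAltMemFence(void)
{
    __sync_synchronize(); /* full barrier: MFENCE, a LOCK'ed RMW, or DMB depending on target */
}
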
9992/** Opcode 0xf3 0x0f 0xae 11b/0. */
9993FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9994{
9995 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9996 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9997 {
9998 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10000 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10001 IEM_MC_LOCAL(uint64_t, u64Dst);
10002 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
10003 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10004 IEM_MC_ADVANCE_RIP_AND_FINISH();
10005 IEM_MC_END();
10006 }
10007 else
10008 {
10009 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10011 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10012 IEM_MC_LOCAL(uint32_t, u32Dst);
10013 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10014 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10015 IEM_MC_ADVANCE_RIP_AND_FINISH();
10016 IEM_MC_END();
10017 }
10018}
10019
10020
10021/** Opcode 0xf3 0x0f 0xae 11b/1. */
10022FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10023{
10024 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10025 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10026 {
10027 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10029 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10030 IEM_MC_LOCAL(uint64_t, u64Dst);
10031 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10032 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10033 IEM_MC_ADVANCE_RIP_AND_FINISH();
10034 IEM_MC_END();
10035 }
10036 else
10037 {
10038 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10040 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10041 IEM_MC_LOCAL(uint32_t, u32Dst);
10042 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10043 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10044 IEM_MC_ADVANCE_RIP_AND_FINISH();
10045 IEM_MC_END();
10046 }
10047}
10048
10049
10050/** Opcode 0xf3 0x0f 0xae 11b/2. */
10051FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10052{
10053 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10054 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10055 {
10056 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10058 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10059 IEM_MC_LOCAL(uint64_t, u64Dst);
10060 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10061 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10062 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10063 IEM_MC_ADVANCE_RIP_AND_FINISH();
10064 IEM_MC_END();
10065 }
10066 else
10067 {
10068 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10070 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10071 IEM_MC_LOCAL(uint32_t, u32Dst);
10072 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10073 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10074 IEM_MC_ADVANCE_RIP_AND_FINISH();
10075 IEM_MC_END();
10076 }
10077}
10078
10079
10080/** Opcode 0xf3 0x0f 0xae 11b/3. */
10081FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10082{
10083 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10084 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10085 {
10086 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10088 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10089 IEM_MC_LOCAL(uint64_t, u64Dst);
10090 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10091 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10092 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10093 IEM_MC_ADVANCE_RIP_AND_FINISH();
10094 IEM_MC_END();
10095 }
10096 else
10097 {
10098 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10100 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10101 IEM_MC_LOCAL(uint32_t, u32Dst);
10102 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10103 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10104 IEM_MC_ADVANCE_RIP_AND_FINISH();
10105 IEM_MC_END();
10106 }
10107}
10108
10109
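/*
 * The IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 checks above reject base
 * values whose bits 63:47 do not all equal bit 47.  A minimal sketch of that
 * 48-bit canonical test (helper name made up; the real check may differ for
 * wider virtual address sizes):
 */
static bool iemExampleIsCanonicalU64(uint64_t uAddr)
{
    /* Shifts the canonical ranges [0,2^47) and [2^64-2^47,2^64) into one [0,2^48) window. */
    return uAddr + UINT64_C(0x800000000000) < UINT64_C(0x1000000000000);
}
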
10110/**
10111 * Group 15 jump table for register variant.
10112 */
10113IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10114{ /* pfx: none, 066h, 0f3h, 0f2h */
10115 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10116 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10117 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10118 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10119 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10120 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10121 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10122 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10123};
10124AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10125
10126
10127/**
10128 * Group 15 jump table for memory variant.
10129 */
10130IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10131{ /* pfx: none, 066h, 0f3h, 0f2h */
10132 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10133 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10134 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10135 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10136 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10137 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10138 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10139 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10140};
10141AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10142
10143
10144/** Opcode 0x0f 0xae. */
10145FNIEMOP_DEF(iemOp_Grp15)
10146{
10147 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10149 if (IEM_IS_MODRM_REG_MODE(bRm))
10150 /* register, register */
10151 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10152 + pVCpu->iem.s.idxPrefix], bRm);
10153 /* memory, register */
10154 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10155 + pVCpu->iem.s.idxPrefix], bRm);
10156}
10157
10158
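/*
 * How the dispatcher above indexes the two tables, spelled out as a tiny
 * sketch: four entries per ModR/M reg value, selected by the recorded
 * mandatory prefix (none, 066h, 0f3h, 0f2h => idxPrefix 0..3, matching the
 * table column comments).  Helper name is made up:
 */
static PFNIEMOPRM iemExampleGrp15Lookup(PFNIEMOPRM const *papfnTable, uint8_t bRm, uint8_t idxPrefix)
{
    return papfnTable[((bRm >> 3) & 7) * 4 + idxPrefix]; /* (bRm >> 3) & 7 is what IEM_GET_MODRM_REG_8 extracts */
}
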
10159/**
10160 * @opcode 0xaf
10161 * @opflclass multiply
10162 */
10163FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10164{
10165 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10166 IEMOP_HLP_MIN_386();
10167 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10168 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10170 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10171}
10172
10173
10174/**
10175 * @opcode 0xb0
10176 * @opflclass arithmetic
10177 */
10178FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10179{
10180 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10181 IEMOP_HLP_MIN_486();
10182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10183
10184 if (IEM_IS_MODRM_REG_MODE(bRm))
10185 {
10186 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10188 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10189 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10190 IEM_MC_ARG(uint8_t, u8Src, 2);
10191 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10192
10193 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10194 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10195 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10196 IEM_MC_REF_EFLAGS(pEFlags);
10197 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10198
10199 IEM_MC_ADVANCE_RIP_AND_FINISH();
10200 IEM_MC_END();
10201 }
10202 else
10203 {
10204#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10205 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10208 IEMOP_HLP_DONE_DECODING(); \
10209 \
10210 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10211 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10212 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10213 \
10214 IEM_MC_ARG(uint8_t, u8Src, 2); \
10215 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10216 \
10217 IEM_MC_LOCAL(uint8_t, u8Al); \
10218 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10219 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10220 \
10221 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10222 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10223 \
10224 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10225 IEM_MC_COMMIT_EFLAGS(EFlags); \
10226 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10227 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10228 IEM_MC_END()
10229
10230 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10231 {
10232 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10233 }
10234 else
10235 {
10236 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10237 }
10238 }
10239}
10240
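/*
 * For orientation, a minimal sketch of the core CMPXCHG semantics the
 * iemAImpl_cmpxchg_* workers used here and in the Ev,Gv form below implement
 * (the real workers also update the arithmetic flags the way CMP does);
 * helper name is made up:
 */
static bool iemExampleCmpXchgU32(uint32_t *puDst, uint32_t *puEax, uint32_t uSrc)
{
    if (*puDst == *puEax)
    {
        *puDst = uSrc;   /* equal: ZF=1 and the destination receives the source */
        return true;
    }
    *puEax = *puDst;     /* unequal: ZF=0 and the accumulator receives the destination */
    return false;
}
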
10241/**
10242 * @opcode 0xb1
10243 * @opflclass arithmetic
10244 */
10245FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10246{
10247 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10248 IEMOP_HLP_MIN_486();
10249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10250
10251 if (IEM_IS_MODRM_REG_MODE(bRm))
10252 {
10253 switch (pVCpu->iem.s.enmEffOpSize)
10254 {
10255 case IEMMODE_16BIT:
10256 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10258 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10259 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10260 IEM_MC_ARG(uint16_t, u16Src, 2);
10261 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10262
10263 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10264 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10265 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10266 IEM_MC_REF_EFLAGS(pEFlags);
10267 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10268
10269 IEM_MC_ADVANCE_RIP_AND_FINISH();
10270 IEM_MC_END();
10271 break;
10272
10273 case IEMMODE_32BIT:
10274 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10276 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10277 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10278 IEM_MC_ARG(uint32_t, u32Src, 2);
10279 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10280
10281 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10282 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10283 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10284 IEM_MC_REF_EFLAGS(pEFlags);
10285 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10286
10287 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10288 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10289 } IEM_MC_ELSE() {
10290 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10291 } IEM_MC_ENDIF();
10292
10293 IEM_MC_ADVANCE_RIP_AND_FINISH();
10294 IEM_MC_END();
10295 break;
10296
10297 case IEMMODE_64BIT:
10298 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10300 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10301 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10302 IEM_MC_ARG(uint64_t, u64Src, 2);
10303 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10304
10305 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10306 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10307 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10308 IEM_MC_REF_EFLAGS(pEFlags);
10309 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10310
10311 IEM_MC_ADVANCE_RIP_AND_FINISH();
10312 IEM_MC_END();
10313 break;
10314
10315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10316 }
10317 }
10318 else
10319 {
10320#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10321 do { \
10322 switch (pVCpu->iem.s.enmEffOpSize) \
10323 { \
10324 case IEMMODE_16BIT: \
10325 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10326 \
10327 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10330 IEMOP_HLP_DONE_DECODING(); \
10331 \
10332 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10333 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10334 \
10335 IEM_MC_ARG(uint16_t, u16Src, 2); \
10336 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10337 \
10338 IEM_MC_LOCAL(uint16_t, u16Ax); \
10339 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10340 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10341 \
10342 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10343 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10344 \
10345 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10346 IEM_MC_COMMIT_EFLAGS(EFlags); \
10347 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10348 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10349 IEM_MC_END(); \
10350 break; \
10351 \
10352 case IEMMODE_32BIT: \
10353 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10356 IEMOP_HLP_DONE_DECODING(); \
10357 \
10358 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10359 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10360 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10361 \
10362 IEM_MC_ARG(uint32_t, u32Src, 2); \
10363 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10364 \
10365 IEM_MC_LOCAL(uint32_t, u32Eax); \
10366 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10367 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10368 \
10369 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10370 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10371 \
10372 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10373 IEM_MC_COMMIT_EFLAGS(EFlags); \
10374 \
10375 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10376 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10377 } IEM_MC_ENDIF(); \
10378 \
10379 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10380 IEM_MC_END(); \
10381 break; \
10382 \
10383 case IEMMODE_64BIT: \
10384 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10387 IEMOP_HLP_DONE_DECODING(); \
10388 \
10389 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10390 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10391 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10392 \
10393 IEM_MC_ARG(uint64_t, u64Src, 2); \
10394 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10395 \
10396 IEM_MC_LOCAL(uint64_t, u64Rax); \
10397 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10398 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10399 \
10400 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10401 \
10402 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10403 \
10404 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10405 IEM_MC_COMMIT_EFLAGS(EFlags); \
10406 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10407 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10408 IEM_MC_END(); \
10409 break; \
10410 \
10411 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10412 } \
10413 } while (0)
10414
10415 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10416 {
10417 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64, RW);
10418 }
10419 else
10420 {
10421 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked, ATOMIC);
10422 }
10423 }
10424}
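
/*
 * For reference, a minimal C sketch of the architectural CMPXCHG semantics
 * the workers above implement (illustrative only: the iemSketch name is
 * invented, the LOCK path is ignored and only ZF of the CMP flags is
 * modelled).  It also shows why the 32-bit case above stores EAX back only
 * when ZF is clear: on a successful compare the accumulator is not written,
 * so the high half of RAX must survive, whereas an unconditional 32-bit
 * register store would zero-extend into bits 63:32.
 */
static void iemSketchCmpXchgU32(uint32_t *puDst, uint32_t *puEax, uint32_t uSrc, uint32_t *pfEFlags)
{
    uint32_t const uOld = *puDst;
    if (uOld == *puEax)
    {
        *puDst     = uSrc;                      /* equal: destination gets the source operand */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *puEax     = uOld;                      /* not equal: accumulator gets the old destination */
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
    }
}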
10425
10426
10427/** Opcode 0x0f 0xb2. */
10428FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10429{
10430 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10431 IEMOP_HLP_MIN_386();
10432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10433 if (IEM_IS_MODRM_REG_MODE(bRm))
10434 IEMOP_RAISE_INVALID_OPCODE_RET();
10435 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10436}
10437
10438
10439/**
10440 * @opcode 0xb3
10441 * @oppfx n/a
10442 * @opflclass bitmap
10443 */
10444FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10445{
10446 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10447 IEMOP_HLP_MIN_386();
10448 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10449 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10450}
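
/*
 * A rough sketch of what the Ev,Gv bit-op bodies used above do with a memory
 * operand (illustrative only, invented name): unlike the Ib forms of group 8,
 * the register bit offset is not masked to the operand width but displaces
 * the effective address, and it is signed, so it can address memory below the
 * base address.  Only BTR's clearing action and CF are modelled; BTS would OR
 * and BTC would XOR.
 */
static void iemSketchBtrU16Mem(uint16_t *pu16Base, int16_t i16BitOffset, uint32_t *pfEFlags)
{
    uint16_t *pu16Unit = pu16Base + (i16BitOffset >> 4);       /* word-granular displacement (arithmetic shift assumed) */
    uint16_t  fMask    = (uint16_t)(1 << (i16BitOffset & 15)); /* bit within the selected word */
    if (*pu16Unit & fMask)
        *pfEFlags |= X86_EFL_CF;
    else
        *pfEFlags &= ~(uint32_t)X86_EFL_CF;
    *pu16Unit &= ~fMask;
}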
10451
10452
10453/** Opcode 0x0f 0xb4. */
10454FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10455{
10456 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10457 IEMOP_HLP_MIN_386();
10458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10459 if (IEM_IS_MODRM_REG_MODE(bRm))
10460 IEMOP_RAISE_INVALID_OPCODE_RET();
10461 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10462}
10463
10464
10465/** Opcode 0x0f 0xb5. */
10466FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10467{
10468 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10469 IEMOP_HLP_MIN_386();
10470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10471 if (IEM_IS_MODRM_REG_MODE(bRm))
10472 IEMOP_RAISE_INVALID_OPCODE_RET();
10473 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10474}
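
/*
 * The Mp operand that iemOpCommonLoadSRegAndGreg consumes for LSS/LFS/LGS is
 * a far pointer in memory: the offset first, the 16-bit selector right after
 * it.  A sketch of the 32-bit operand-size layout (illustrative only, the
 * type name is invented):
 */
typedef struct IEMSKETCHFARPTR32
{
    uint32_t offSeg;    /* goes into the Gv destination register */
    uint16_t uSel;      /* goes into SS, FS or GS via the segment-load path */
} IEMSKETCHFARPTR32;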
10475
10476
10477/** Opcode 0x0f 0xb6. */
10478FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10479{
10480 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10481 IEMOP_HLP_MIN_386();
10482
10483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10484
10485 /*
10486 * If rm is denoting a register, no more instruction bytes.
10487 */
10488 if (IEM_IS_MODRM_REG_MODE(bRm))
10489 {
10490 switch (pVCpu->iem.s.enmEffOpSize)
10491 {
10492 case IEMMODE_16BIT:
10493 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10495 IEM_MC_LOCAL(uint16_t, u16Value);
10496 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10497 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10498 IEM_MC_ADVANCE_RIP_AND_FINISH();
10499 IEM_MC_END();
10500 break;
10501
10502 case IEMMODE_32BIT:
10503 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10505 IEM_MC_LOCAL(uint32_t, u32Value);
10506 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10507 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10508 IEM_MC_ADVANCE_RIP_AND_FINISH();
10509 IEM_MC_END();
10510 break;
10511
10512 case IEMMODE_64BIT:
10513 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10515 IEM_MC_LOCAL(uint64_t, u64Value);
10516 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10517 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10518 IEM_MC_ADVANCE_RIP_AND_FINISH();
10519 IEM_MC_END();
10520 break;
10521
10522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10523 }
10524 }
10525 else
10526 {
10527 /*
10528 * We're loading a register from memory.
10529 */
10530 switch (pVCpu->iem.s.enmEffOpSize)
10531 {
10532 case IEMMODE_16BIT:
10533 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10534 IEM_MC_LOCAL(uint16_t, u16Value);
10535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10538 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10539 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10540 IEM_MC_ADVANCE_RIP_AND_FINISH();
10541 IEM_MC_END();
10542 break;
10543
10544 case IEMMODE_32BIT:
10545 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10546 IEM_MC_LOCAL(uint32_t, u32Value);
10547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10550 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10551 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10552 IEM_MC_ADVANCE_RIP_AND_FINISH();
10553 IEM_MC_END();
10554 break;
10555
10556 case IEMMODE_64BIT:
10557 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10558 IEM_MC_LOCAL(uint64_t, u64Value);
10559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10562 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10563 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10564 IEM_MC_ADVANCE_RIP_AND_FINISH();
10565 IEM_MC_END();
10566 break;
10567
10568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10569 }
10570 }
10571}
10572
10573
10574/** Opcode 0x0f 0xb7. */
10575FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10576{
10577 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10578 IEMOP_HLP_MIN_386();
10579
10580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10581
10582 /** @todo Not entirely sure how the operand size prefix is handled here,
10583 * assuming that it will be ignored. Would be nice to have a few
10584 * tests for this. */
10585
10586 /** @todo There should be no difference in the behaviour whether REX.W is
10587 * present or not... */
10588
10589 /*
10590 * If rm is denoting a register, no more instruction bytes.
10591 */
10592 if (IEM_IS_MODRM_REG_MODE(bRm))
10593 {
10594 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10595 {
10596 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10598 IEM_MC_LOCAL(uint32_t, u32Value);
10599 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10600 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10601 IEM_MC_ADVANCE_RIP_AND_FINISH();
10602 IEM_MC_END();
10603 }
10604 else
10605 {
10606 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10608 IEM_MC_LOCAL(uint64_t, u64Value);
10609 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10610 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10611 IEM_MC_ADVANCE_RIP_AND_FINISH();
10612 IEM_MC_END();
10613 }
10614 }
10615 else
10616 {
10617 /*
10618 * We're loading a register from memory.
10619 */
10620 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10621 {
10622 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10623 IEM_MC_LOCAL(uint32_t, u32Value);
10624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10627 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10628 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10629 IEM_MC_ADVANCE_RIP_AND_FINISH();
10630 IEM_MC_END();
10631 }
10632 else
10633 {
10634 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10635 IEM_MC_LOCAL(uint64_t, u64Value);
10636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10639 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10640 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10641 IEM_MC_ADVANCE_RIP_AND_FINISH();
10642 IEM_MC_END();
10643 }
10644 }
10645}
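
/*
 * All six MOVZX cases above reduce to plain C zero extension; a tiny sketch
 * (illustrative only, invented name).  Note that in 64-bit mode a 32-bit
 * destination write clears bits 63:32 anyway, which IEM_MC_STORE_GREG_U32
 * does implicitly, so the 32-bit cases need no explicit extension.
 */
static uint64_t iemSketchMovzxU16ToU64(uint16_t u16Src)
{
    return u16Src;  /* implicit conversion fills bits 63:16 with zero */
}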
10646
10647
10648/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10649FNIEMOP_UD_STUB(iemOp_jmpe);
10650
10651
10652/**
10653 * @opcode 0xb8
10654 * @oppfx 0xf3
10655 * @opflmodify cf,pf,af,zf,sf,of
10656 * @opflclear cf,pf,af,sf,of
10657 */
10658FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10659{
10660 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10661 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10662 return iemOp_InvalidNeedRM(pVCpu);
10663#ifndef TST_IEM_CHECK_MC
10664# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10665 static const IEMOPBINSIZES s_Native =
10666 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10667# endif
10668 static const IEMOPBINSIZES s_Fallback =
10669 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10670#endif
10671 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10673 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10674}
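
/*
 * IEM_SELECT_HOST_OR_FALLBACK above picks the native helper when the host
 * can execute POPCNT itself.  For flavour, one common portable way to count
 * bits, as the C fallback might do it (a sketch, illustrative only; the real
 * fallback worker may differ).  Architecturally POPCNT also clears
 * CF/PF/AF/SF/OF and sets ZF for a zero source, which is omitted here.
 */
static uint32_t iemSketchPopCntU32(uint32_t uSrc)
{
    uSrc = uSrc - ((uSrc >> 1) & UINT32_C(0x55555555));                          /* pairwise bit sums */
    uSrc = (uSrc & UINT32_C(0x33333333)) + ((uSrc >> 2) & UINT32_C(0x33333333)); /* nibble sums */
    uSrc = (uSrc + (uSrc >> 4)) & UINT32_C(0x0f0f0f0f);                          /* byte sums */
    return (uSrc * UINT32_C(0x01010101)) >> 24;                                  /* add up the four bytes */
}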
10675
10676
10677/**
10678 * @opcode 0xb9
10679 * @opinvalid intel-modrm
10680 * @optest ->
10681 */
10682FNIEMOP_DEF(iemOp_Grp10)
10683{
10684 /*
10685 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
10686 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10687 */
10688 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10689 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10690 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10691}
10692
10693
10694/**
10695 * Body for group 8 bit instruction.
10696 */
10697#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10698 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10699 \
10700 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10701 { \
10702 /* register destination. */ \
10703 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10704 \
10705 switch (pVCpu->iem.s.enmEffOpSize) \
10706 { \
10707 case IEMMODE_16BIT: \
10708 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10710 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10711 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10712 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10713 \
10714 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10715 IEM_MC_REF_EFLAGS(pEFlags); \
10716 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10717 \
10718 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10719 IEM_MC_END(); \
10720 break; \
10721 \
10722 case IEMMODE_32BIT: \
10723 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10725 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10726 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10727 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10728 \
10729 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10730 IEM_MC_REF_EFLAGS(pEFlags); \
10731 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10732 \
10733 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10734 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10735 IEM_MC_END(); \
10736 break; \
10737 \
10738 case IEMMODE_64BIT: \
10739 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10741 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10742 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10743 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10744 \
10745 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10746 IEM_MC_REF_EFLAGS(pEFlags); \
10747 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10748 \
10749 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10750 IEM_MC_END(); \
10751 break; \
10752 \
10753 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10754 } \
10755 } \
10756 else \
10757 { \
10758 /* memory destination. */ \
10759 /** @todo test negative bit offsets! */ \
10760 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10761 { \
10762 switch (pVCpu->iem.s.enmEffOpSize) \
10763 { \
10764 case IEMMODE_16BIT: \
10765 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10768 \
10769 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10770 IEMOP_HLP_DONE_DECODING(); \
10771 \
10772 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10773 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10774 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10775 \
10776 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10777 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10778 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10779 \
10780 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10781 IEM_MC_COMMIT_EFLAGS(EFlags); \
10782 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10783 IEM_MC_END(); \
10784 break; \
10785 \
10786 case IEMMODE_32BIT: \
10787 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10790 \
10791 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10792 IEMOP_HLP_DONE_DECODING(); \
10793 \
10794 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10795 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10796 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10797 \
10798 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10799 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10800 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10801 \
10802 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10803 IEM_MC_COMMIT_EFLAGS(EFlags); \
10804 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10805 IEM_MC_END(); \
10806 break; \
10807 \
10808 case IEMMODE_64BIT: \
10809 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10812 \
10813 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10814 IEMOP_HLP_DONE_DECODING(); \
10815 \
10816 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10817 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10818 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10819 \
10820 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10821 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10822 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10823 \
10824 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10825 IEM_MC_COMMIT_EFLAGS(EFlags); \
10826 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10827 IEM_MC_END(); \
10828 break; \
10829 \
10830 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10831 } \
10832 } \
10833 else \
10834 { \
10835 (void)0
10836/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10837#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10838 switch (pVCpu->iem.s.enmEffOpSize) \
10839 { \
10840 case IEMMODE_16BIT: \
10841 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10844 \
10845 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10846 IEMOP_HLP_DONE_DECODING(); \
10847 \
10848 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10849 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10850 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10851 \
10852 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10853 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10854 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10855 \
10856 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10857 IEM_MC_COMMIT_EFLAGS(EFlags); \
10858 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10859 IEM_MC_END(); \
10860 break; \
10861 \
10862 case IEMMODE_32BIT: \
10863 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10866 \
10867 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10868 IEMOP_HLP_DONE_DECODING(); \
10869 \
10870 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10871 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10872 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10873 \
10874 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10875 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10876 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10877 \
10878 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10879 IEM_MC_COMMIT_EFLAGS(EFlags); \
10880 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10881 IEM_MC_END(); \
10882 break; \
10883 \
10884 case IEMMODE_64BIT: \
10885 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10888 \
10889 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10890 IEMOP_HLP_DONE_DECODING(); \
10891 \
10892 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10893 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10894 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10895 \
10896 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10897 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10898 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10899 \
10900 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10901 IEM_MC_COMMIT_EFLAGS(EFlags); \
10902 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10903 IEM_MC_END(); \
10904 break; \
10905 \
10906 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10907 } \
10908 } \
10909 } \
10910 (void)0
10911
10912/* Read-only version (bt) */
10913#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10914 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10915 \
10916 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10917 { \
10918 /* register destination. */ \
10919 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10920 \
10921 switch (pVCpu->iem.s.enmEffOpSize) \
10922 { \
10923 case IEMMODE_16BIT: \
10924 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10926 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10927 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10928 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10929 \
10930 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10931 IEM_MC_REF_EFLAGS(pEFlags); \
10932 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10933 \
10934 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10935 IEM_MC_END(); \
10936 break; \
10937 \
10938 case IEMMODE_32BIT: \
10939 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10941 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
10942 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10943 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10944 \
10945 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10946 IEM_MC_REF_EFLAGS(pEFlags); \
10947 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10948 \
10949 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10950 IEM_MC_END(); \
10951 break; \
10952 \
10953 case IEMMODE_64BIT: \
10954 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10956 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
10957 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10958 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10959 \
10960 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10961 IEM_MC_REF_EFLAGS(pEFlags); \
10962 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10963 \
10964 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10965 IEM_MC_END(); \
10966 break; \
10967 \
10968 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10969 } \
10970 } \
10971 else \
10972 { \
10973 /* memory destination. */ \
10974 /** @todo test negative bit offsets! */ \
10975 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10976 { \
10977 switch (pVCpu->iem.s.enmEffOpSize) \
10978 { \
10979 case IEMMODE_16BIT: \
10980 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10983 \
10984 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10985 IEMOP_HLP_DONE_DECODING(); \
10986 \
10987 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10988 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10989 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10990 \
10991 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10992 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10993 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10994 \
10995 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10996 IEM_MC_COMMIT_EFLAGS(EFlags); \
10997 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10998 IEM_MC_END(); \
10999 break; \
11000 \
11001 case IEMMODE_32BIT: \
11002 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11005 \
11006 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11007 IEMOP_HLP_DONE_DECODING(); \
11008 \
11009 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11010 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11011 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11012 \
11013 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11014 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11015 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11016 \
11017 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11018 IEM_MC_COMMIT_EFLAGS(EFlags); \
11019 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11020 IEM_MC_END(); \
11021 break; \
11022 \
11023 case IEMMODE_64BIT: \
11024 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11027 \
11028 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11029 IEMOP_HLP_DONE_DECODING(); \
11030 \
11031 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11032 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11033 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11034 \
11035 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11036 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11037 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11038 \
11039 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11040 IEM_MC_COMMIT_EFLAGS(EFlags); \
11041 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11042 IEM_MC_END(); \
11043 break; \
11044 \
11045 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11046 } \
11047 } \
11048 else \
11049 { \
11050 IEMOP_HLP_DONE_DECODING(); \
11051 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11052 } \
11053 } \
11054 (void)0
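
/*
 * Contrast with the Ev,Gv bit-op forms: in the group 8 Ib encodings above the
 * immediate bit offset is masked to the operand width (0x0f/0x1f/0x3f) and
 * never displaces the effective address.  A minimal sketch of the BT case
 * (illustrative only, invented name):
 */
static void iemSketchBtU32Imm(uint32_t const *pu32Dst, uint8_t bImm, uint32_t *pfEFlags)
{
    uint32_t const fMask = UINT32_C(1) << (bImm & 0x1f);    /* the offset wraps inside the dword */
    if (*pu32Dst & fMask)
        *pfEFlags |= X86_EFL_CF;
    else
        *pfEFlags &= ~(uint32_t)X86_EFL_CF;
}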
11055
11056
11057/**
11058 * @opmaps grp8
11059 * @opcode /4
11060 * @oppfx n/a
11061 * @opflclass bitmap
11062 */
11063FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11064{
11065 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11066 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11067}
11068
11069
11070/**
11071 * @opmaps grp8
11072 * @opcode /5
11073 * @oppfx n/a
11074 * @opflclass bitmap
11075 */
11076FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11077{
11078 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11079 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11080 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11081}
11082
11083
11084/**
11085 * @opmaps grp8
11086 * @opcode /6
11087 * @oppfx n/a
11088 * @opflclass bitmap
11089 */
11090FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11091{
11092 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11093 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11094 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11095}
11096
11097
11098/**
11099 * @opmaps grp8
11100 * @opcode /7
11101 * @oppfx n/a
11102 * @opflclass bitmap
11103 */
11104FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11105{
11106 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11107 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11108 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11109}
11110
11111
11112/** Opcode 0x0f 0xba. */
11113FNIEMOP_DEF(iemOp_Grp8)
11114{
11115 IEMOP_HLP_MIN_386();
11116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11117 switch (IEM_GET_MODRM_REG_8(bRm))
11118 {
11119 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11120 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11121 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11122 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11123
11124 case 0: case 1: case 2: case 3:
11125 /* Both AMD and Intel want full modr/m decoding and imm8. */
11126 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11127
11128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11129 }
11130}
11131
11132
11133/**
11134 * @opcode 0xbb
11135 * @oppfx n/a
11136 * @opflclass bitmap
11137 */
11138FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11139{
11140 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11141 IEMOP_HLP_MIN_386();
11142 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11143 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11144}
11145
11146
11147/**
11148 * Body for BSF and BSR instructions.
11149 *
11150 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11151 * the destination register, which means that for 32-bit operations the high
11152 * 32 bits of the 64-bit destination must be left alone when nothing is written.
11153 *
11154 * @param pImpl Pointer to the instruction implementation (assembly).
11155 */
11156#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11158 \
11159 /* \
11160 * If rm is denoting a register, no more instruction bytes. \
11161 */ \
11162 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11163 { \
11164 switch (pVCpu->iem.s.enmEffOpSize) \
11165 { \
11166 case IEMMODE_16BIT: \
11167 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11169 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11170 IEM_MC_ARG(uint16_t, u16Src, 1); \
11171 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11172 \
11173 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11174 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11175 IEM_MC_REF_EFLAGS(pEFlags); \
11176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); \
11177 \
11178 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11179 IEM_MC_END(); \
11180 break; \
11181 \
11182 case IEMMODE_32BIT: \
11183 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11185 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11186 IEM_MC_ARG(uint32_t, u32Src, 1); \
11187 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11188 \
11189 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11190 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11191 IEM_MC_REF_EFLAGS(pEFlags); \
11192 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); \
11193 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11194 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11195 } IEM_MC_ENDIF(); \
11196 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11197 IEM_MC_END(); \
11198 break; \
11199 \
11200 case IEMMODE_64BIT: \
11201 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11203 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11204 IEM_MC_ARG(uint64_t, u64Src, 1); \
11205 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11206 \
11207 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11208 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11209 IEM_MC_REF_EFLAGS(pEFlags); \
11210 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); \
11211 \
11212 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11213 IEM_MC_END(); \
11214 break; \
11215 \
11216 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11217 } \
11218 } \
11219 else \
11220 { \
11221 /* \
11222 * We're accessing memory. \
11223 */ \
11224 switch (pVCpu->iem.s.enmEffOpSize) \
11225 { \
11226 case IEMMODE_16BIT: \
11227 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11228 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11229 IEM_MC_ARG(uint16_t, u16Src, 1); \
11230 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11232 \
11233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11235 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11236 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11237 IEM_MC_REF_EFLAGS(pEFlags); \
11238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); \
11239 \
11240 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11241 IEM_MC_END(); \
11242 break; \
11243 \
11244 case IEMMODE_32BIT: \
11245 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11246 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11247 IEM_MC_ARG(uint32_t, u32Src, 1); \
11248 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11250 \
11251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11253 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11254 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11255 IEM_MC_REF_EFLAGS(pEFlags); \
11256 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); \
11257 \
11258 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11259 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11260 } IEM_MC_ENDIF(); \
11261 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11262 IEM_MC_END(); \
11263 break; \
11264 \
11265 case IEMMODE_64BIT: \
11266 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11267 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11268 IEM_MC_ARG(uint64_t, u64Src, 1); \
11269 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11271 \
11272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11274 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11275 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11276 IEM_MC_REF_EFLAGS(pEFlags); \
11277 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); \
11278 \
11279 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11280 IEM_MC_END(); \
11281 break; \
11282 \
11283 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11284 } \
11285 } (void)0
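
/*
 * Why the 32-bit cases above guard IEM_MC_CLEAR_HIGH_GREG_U64 with ZF, as a
 * sketch (illustrative only, invented name): for a zero source the helpers
 * set ZF and leave the destination unwritten, so the implicit zero extension
 * of a 32-bit register write may only happen on the path that actually wrote
 * the register.
 */
static void iemSketchBsfU32(uint32_t *pu32Dst, uint32_t u32Src, uint32_t *pfEFlags)
{
    if (u32Src)
    {
        uint32_t iBit = 0;
        while (!(u32Src & (UINT32_C(1) << iBit)))  /* lowest set bit; BSR scans from bit 31 down instead */
            iBit++;
        *pu32Dst   = iBit;
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
    }
    else
        *pfEFlags |= X86_EFL_ZF;                   /* destination deliberately left untouched */
}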
11286
11287
11288/**
11289 * @opcode 0xbc
11290 * @oppfx !0xf3
11291 * @opfltest cf,pf,af,sf,of
11292 * @opflmodify cf,pf,af,zf,sf,of
11293 * @opflundef cf,pf,af,sf,of
11294 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11295 * document them as inputs. Sigh.
11296 */
11297FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11298{
11299 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11300 IEMOP_HLP_MIN_386();
11301 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11302 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11303 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11304}
11305
11306
11307/**
11308 * @opcode 0xbc
11309 * @oppfx 0xf3
11310 * @opfltest pf,af,sf,of
11311 * @opflmodify cf,pf,af,zf,sf,of
11312 * @opflundef pf,af,sf,of
11313 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11314 * document them as inputs. Sigh.
11315 */
11316FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11317{
11318 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11319 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11320 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11321
11322#ifndef TST_IEM_CHECK_MC
11323 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11324 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11325 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11326 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11327 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11328 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11329 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11330 {
11331 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11332 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11333 };
11334#endif
11335 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11336 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11337 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11339 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11340}
11341
11342
11343/**
11344 * @opcode 0xbd
11345 * @oppfx !0xf3
11346 * @opfltest cf,pf,af,sf,of
11347 * @opflmodify cf,pf,af,zf,sf,of
11348 * @opflundef cf,pf,af,sf,of
11349 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11350 * document them as inputs. Sigh.
11351 */
11352FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11353{
11354 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11355 IEMOP_HLP_MIN_386();
11356 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11357 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11358 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11359}
11360
11361
11362/**
11363 * @opcode 0xbd
11364 * @oppfx 0xf3
11365 * @opfltest pf,af,sf,of
11366 * @opflmodify cf,pf,af,zf,sf,of
11367 * @opflundef pf,af,sf,of
11368 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11369 * document them as inputs. Sigh.
11370 */
11371FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11372{
11373 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11374 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11375 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11376
11377#ifndef TST_IEM_CHECK_MC
11378 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11379 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11380 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11381 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11382 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11383 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11384 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11385 {
11386 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11387 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11388 };
11389#endif
11390 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11391 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11392 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11394 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11395}
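
/*
 * What separates TZCNT/LZCNT from BSF/BSR is mainly the zero-source case:
 * the result is well defined (the operand width) and CF/ZF get fixed
 * meanings.  A sketch of the TZCNT side (illustrative only, invented name;
 * LZCNT mirrors it from bit 31 down):
 */
static uint32_t iemSketchTzCntU32(uint32_t uSrc, uint32_t *pfEFlags)
{
    uint32_t cBits = 0;
    if (uSrc)
    {
        while (!(uSrc & 1))             /* count trailing zero bits */
        {
            uSrc >>= 1;
            cBits++;
        }
        *pfEFlags &= ~(uint32_t)X86_EFL_CF;
    }
    else
    {
        cBits      = 32;                /* zero source: the result is the operand width */
        *pfEFlags |= X86_EFL_CF;        /* ... and CF signals it */
    }
    if (cBits == 0)
        *pfEFlags |= X86_EFL_ZF;        /* ZF reflects a zero result, not a zero source */
    else
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
    return cBits;
}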
11396
11397
11398
11399/** Opcode 0x0f 0xbe. */
11400FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11401{
11402 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11403 IEMOP_HLP_MIN_386();
11404
11405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11406
11407 /*
11408 * If rm is denoting a register, no more instruction bytes.
11409 */
11410 if (IEM_IS_MODRM_REG_MODE(bRm))
11411 {
11412 switch (pVCpu->iem.s.enmEffOpSize)
11413 {
11414 case IEMMODE_16BIT:
11415 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11417 IEM_MC_LOCAL(uint16_t, u16Value);
11418 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11419 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11420 IEM_MC_ADVANCE_RIP_AND_FINISH();
11421 IEM_MC_END();
11422 break;
11423
11424 case IEMMODE_32BIT:
11425 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11427 IEM_MC_LOCAL(uint32_t, u32Value);
11428 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11429 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11430 IEM_MC_ADVANCE_RIP_AND_FINISH();
11431 IEM_MC_END();
11432 break;
11433
11434 case IEMMODE_64BIT:
11435 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11437 IEM_MC_LOCAL(uint64_t, u64Value);
11438 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11439 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11440 IEM_MC_ADVANCE_RIP_AND_FINISH();
11441 IEM_MC_END();
11442 break;
11443
11444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11445 }
11446 }
11447 else
11448 {
11449 /*
11450 * We're loading a register from memory.
11451 */
11452 switch (pVCpu->iem.s.enmEffOpSize)
11453 {
11454 case IEMMODE_16BIT:
11455 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11456 IEM_MC_LOCAL(uint16_t, u16Value);
11457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11460 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11461 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11462 IEM_MC_ADVANCE_RIP_AND_FINISH();
11463 IEM_MC_END();
11464 break;
11465
11466 case IEMMODE_32BIT:
11467 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11468 IEM_MC_LOCAL(uint32_t, u32Value);
11469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11472 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11473 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11474 IEM_MC_ADVANCE_RIP_AND_FINISH();
11475 IEM_MC_END();
11476 break;
11477
11478 case IEMMODE_64BIT:
11479 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11480 IEM_MC_LOCAL(uint64_t, u64Value);
11481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11484 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11485 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11486 IEM_MC_ADVANCE_RIP_AND_FINISH();
11487 IEM_MC_END();
11488 break;
11489
11490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11491 }
11492 }
11493}
11494
11495
11496/** Opcode 0x0f 0xbf. */
11497FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11498{
11499 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11500 IEMOP_HLP_MIN_386();
11501
11502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11503
11504 /** @todo Not entirely sure how the operand size prefix is handled here,
11505 * assuming that it will be ignored. Would be nice to have a few
11506 * tests for this. */
11507 /*
11508 * If rm is denoting a register, no more instruction bytes.
11509 */
11510 if (IEM_IS_MODRM_REG_MODE(bRm))
11511 {
11512 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11513 {
11514 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11516 IEM_MC_LOCAL(uint32_t, u32Value);
11517 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11518 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11519 IEM_MC_ADVANCE_RIP_AND_FINISH();
11520 IEM_MC_END();
11521 }
11522 else
11523 {
11524 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11526 IEM_MC_LOCAL(uint64_t, u64Value);
11527 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11528 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11529 IEM_MC_ADVANCE_RIP_AND_FINISH();
11530 IEM_MC_END();
11531 }
11532 }
11533 else
11534 {
11535 /*
11536 * We're loading a register from memory.
11537 */
11538 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11539 {
11540 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11541 IEM_MC_LOCAL(uint32_t, u32Value);
11542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11545 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11546 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11547 IEM_MC_ADVANCE_RIP_AND_FINISH();
11548 IEM_MC_END();
11549 }
11550 else
11551 {
11552 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11553 IEM_MC_LOCAL(uint64_t, u64Value);
11554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11557 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11558 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11559 IEM_MC_ADVANCE_RIP_AND_FINISH();
11560 IEM_MC_END();
11561 }
11562 }
11563}
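
/*
 * MOVSX is the arithmetic twin of the MOVZX sketch further up; the sign bit
 * gets replicated instead of zero filled (illustrative only, invented name):
 */
static uint64_t iemSketchMovsxU16ToU64(uint16_t u16Src)
{
    return (uint64_t)(int64_t)(int16_t)u16Src;  /* bit 15 is replicated through bits 63:16 */
}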
11564
11565
11566/**
11567 * @opcode 0xc0
11568 * @opflclass arithmetic
11569 */
11570FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11571{
11572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11573 IEMOP_HLP_MIN_486();
11574 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11575
11576 /*
11577 * If rm is denoting a register, no more instruction bytes.
11578 */
11579 if (IEM_IS_MODRM_REG_MODE(bRm))
11580 {
11581 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11583 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11584 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11585 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11586
11587 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11588 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11589 IEM_MC_REF_EFLAGS(pEFlags);
11590 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11591
11592 IEM_MC_ADVANCE_RIP_AND_FINISH();
11593 IEM_MC_END();
11594 }
11595 else
11596 {
11597 /*
11598 * We're accessing memory.
11599 */
11600#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11601 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11604 IEMOP_HLP_DONE_DECODING(); \
11605 \
11606 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11607 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11608 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11609 \
11610 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11611 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11612 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11613 \
11614 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11615 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11616 \
11617 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11618 IEM_MC_COMMIT_EFLAGS(EFlags); \
11619 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11620 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11621 IEM_MC_END()
11622 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11623 {
11624 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8, RW);
11625 }
11626 else
11627 {
11628 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked, ATOMIC);
11629 }
11630 }
11631}
11632
11633
11634/**
11635 * @opcode 0xc1
11636 * @opflclass arithmetic
11637 */
11638FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11639{
11640 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11641 IEMOP_HLP_MIN_486();
11642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11643
11644 /*
11645 * If rm is denoting a register, no more instruction bytes.
11646 */
11647 if (IEM_IS_MODRM_REG_MODE(bRm))
11648 {
11649 switch (pVCpu->iem.s.enmEffOpSize)
11650 {
11651 case IEMMODE_16BIT:
11652 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11654 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11655 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11656 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11657
11658 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11659 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11660 IEM_MC_REF_EFLAGS(pEFlags);
11661 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11662
11663 IEM_MC_ADVANCE_RIP_AND_FINISH();
11664 IEM_MC_END();
11665 break;
11666
11667 case IEMMODE_32BIT:
11668 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11670 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11671 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11672 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11673
11674 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11675 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11676 IEM_MC_REF_EFLAGS(pEFlags);
11677 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11678
11679 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11680 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11681 IEM_MC_ADVANCE_RIP_AND_FINISH();
11682 IEM_MC_END();
11683 break;
11684
11685 case IEMMODE_64BIT:
11686 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11688 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11689 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11690 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11691
11692 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11693 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11694 IEM_MC_REF_EFLAGS(pEFlags);
11695 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11696
11697 IEM_MC_ADVANCE_RIP_AND_FINISH();
11698 IEM_MC_END();
11699 break;
11700
11701 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11702 }
11703 }
11704 else
11705 {
11706 /*
11707 * We're accessing memory.
11708 */
11709#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11710 do { \
11711 switch (pVCpu->iem.s.enmEffOpSize) \
11712 { \
11713 case IEMMODE_16BIT: \
11714 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11717 IEMOP_HLP_DONE_DECODING(); \
11718 \
11719 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11720 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11721 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11722 \
11723 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11724 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11725 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11726 \
11727 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11728 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11729 \
11730 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11731 IEM_MC_COMMIT_EFLAGS(EFlags); \
11732 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11733 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11734 IEM_MC_END(); \
11735 break; \
11736 \
11737 case IEMMODE_32BIT: \
11738 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11741 IEMOP_HLP_DONE_DECODING(); \
11742 \
11743 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11744 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11745 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11746 \
11747 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11748 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11749 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11750 \
11751 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11752 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11753 \
11754 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11755 IEM_MC_COMMIT_EFLAGS(EFlags); \
11756 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11757 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11758 IEM_MC_END(); \
11759 break; \
11760 \
11761 case IEMMODE_64BIT: \
11762 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11765 IEMOP_HLP_DONE_DECODING(); \
11766 \
11767 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11768 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11769 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11770 \
11771 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11772 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11773 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11774 \
11775 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11776 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11777 \
11778 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11779 IEM_MC_COMMIT_EFLAGS(EFlags); \
11780 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11781 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11782 IEM_MC_END(); \
11783 break; \
11784 \
11785 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11786 } \
11787 } while (0)
11788
11789 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11790 {
11791 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64, RW);
11792 }
11793 else
11794 {
11795 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked, ATOMIC);
11796 }
11797 }
11798}
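
/*
 * The XADD exchange-and-add in sketch form (illustrative only, invented
 * name; the arithmetic flags come from the ADD and are not modelled).  The
 * register-copy dance in the memory path above exists so that the old
 * destination value lands in the Gv register only after the memory operand
 * has been committed.
 */
static void iemSketchXaddU32(uint32_t *pu32Dst, uint32_t *pu32Reg)
{
    uint32_t const u32Tmp = *pu32Dst;   /* remember the old destination */
    *pu32Dst = u32Tmp + *pu32Reg;       /* the destination becomes the sum */
    *pu32Reg = u32Tmp;                  /* the source register gets the old destination */
}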
11799
11800
11801/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11802FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11803{
11804 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11805
11806 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11807 if (IEM_IS_MODRM_REG_MODE(bRm))
11808 {
11809 /*
11810 * XMM, XMM.
11811 */
11812 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11813 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11815 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11816 IEM_MC_LOCAL(X86XMMREG, Dst);
11817 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11818 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11819 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11820 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11821 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11822 IEM_MC_PREPARE_SSE_USAGE();
11823 IEM_MC_REF_MXCSR(pfMxcsr);
11824 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11825 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11826 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11827 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11828 } IEM_MC_ELSE() {
11829 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11830 } IEM_MC_ENDIF();
11831
11832 IEM_MC_ADVANCE_RIP_AND_FINISH();
11833 IEM_MC_END();
11834 }
11835 else
11836 {
11837 /*
11838 * XMM, [mem128].
11839 */
11840 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11841 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11842 IEM_MC_LOCAL(X86XMMREG, Dst);
11843 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11844 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11845 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11847
11848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11849 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11850 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11852 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11853 IEM_MC_PREPARE_SSE_USAGE();
11854
11855 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11856 IEM_MC_REF_MXCSR(pfMxcsr);
11857 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11858 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11859 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11860 } IEM_MC_ELSE() {
11861 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11862 } IEM_MC_ENDIF();
11863
11864 IEM_MC_ADVANCE_RIP_AND_FINISH();
11865 IEM_MC_END();
11866 }
11867}
11868
11869
11870/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11871FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11872{
11873 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11874
11875 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11876 if (IEM_IS_MODRM_REG_MODE(bRm))
11877 {
11878 /*
11879 * XMM, XMM.
11880 */
11881 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11882 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11884 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11885 IEM_MC_LOCAL(X86XMMREG, Dst);
11886 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11887 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11888 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11889 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11890 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11891 IEM_MC_PREPARE_SSE_USAGE();
11892 IEM_MC_REF_MXCSR(pfMxcsr);
11893 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11894 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11895 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11896 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11897 } IEM_MC_ELSE() {
11898 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11899 } IEM_MC_ENDIF();
11900
11901 IEM_MC_ADVANCE_RIP_AND_FINISH();
11902 IEM_MC_END();
11903 }
11904 else
11905 {
11906 /*
11907 * XMM, [mem128].
11908 */
11909 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11910 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11911 IEM_MC_LOCAL(X86XMMREG, Dst);
11912 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11913 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11914 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11916
11917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11918 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11919 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11921 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11922 IEM_MC_PREPARE_SSE_USAGE();
11923
11924 IEM_MC_REF_MXCSR(pfMxcsr);
11925 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11926 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11927 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11928 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11929 } IEM_MC_ELSE() {
11930 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11931 } IEM_MC_ENDIF();
11932
11933 IEM_MC_ADVANCE_RIP_AND_FINISH();
11934 IEM_MC_END();
11935 }
11936}
11937
11938
11939/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11940FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11941{
11942 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11943
11944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11945 if (IEM_IS_MODRM_REG_MODE(bRm))
11946 {
11947 /*
11948 * XMM32, XMM32.
11949 */
11950 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11951 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* CMPSS is SSE, not SSE2 */
11953 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11954 IEM_MC_LOCAL(X86XMMREG, Dst);
11955 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11956 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11957 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11958 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11959 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11960 IEM_MC_PREPARE_SSE_USAGE();
11961 IEM_MC_REF_MXCSR(pfMxcsr);
11962 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11963 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11964 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11965 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11966 } IEM_MC_ELSE() {
11967 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11968 } IEM_MC_ENDIF();
11969
11970 IEM_MC_ADVANCE_RIP_AND_FINISH();
11971 IEM_MC_END();
11972 }
11973 else
11974 {
11975 /*
11976 * XMM32, [mem32].
11977 */
11978 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11979 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11980 IEM_MC_LOCAL(X86XMMREG, Dst);
11981 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11982 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11983 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11985
11986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11987 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11988 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* CMPSS is SSE, not SSE2 */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                              0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                              0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
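    /* Note! The non-temporal store hint only affects caching on real hardware;
             for emulation purposes a plain store is sufficient. */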

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                IEMOP_RAISE_INVALID_OPCODE_RET();

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */


/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
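    /* Note! Only the low two bits of the immediate matter here; they select
             which of the four word lanes of the 64-bit destination is
             replaced (see the 'bImm & 3' below). */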
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG_8(bRm), bImm & 3, uValue);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();

        IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG_8(bRm), bImm & 3, uValue);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */


/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, MMX, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */


/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
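    /* Note! Two bits of the immediate per result lane: the low two lanes are
             picked from the destination, the high two from the source. */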
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */


/**
 * @opmaps grp9
 * @opcode /1
 * @opcodesub !11 mr/reg rex.w=0
 * @oppfx n/a
 * @opflmodify zf
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
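    /* Note! The a_Type parameter selects the plain read-write memory mapping or
             the atomic one used for the LOCK prefixed variant; EDX:EAX is only
             loaded with the memory operand when ZF ends up clear, i.e. when the
             compare failed. */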
#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
    IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
    \
    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
    IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
    \
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
    IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
    IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
    \
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
    IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
    IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
    \
    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
    IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
    \
    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
    IEM_MC_COMMIT_EFLAGS(EFlags); \
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
        IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    \
    IEM_MC_END()
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
    {
        IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
    }
    else
    {
        IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
    }
}


/**
 * @opmaps grp9
 * @opcode /1
 * @opcodesub !11 mr/reg rex.w=1
 * @oppfx n/a
 * @opflmodify zf
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
    {
        /*
         * This is hairy, very hairy macro fun.  We're walking a fine line
         * here to make the code parsable by IEMAllInstPython.py and fit into
         * the patterns IEMAllThrdPython.py requires for the code morphing.
         */
#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        \
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
        bUnmapInfoStmt; \
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
        IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
        IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
        \
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
        IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
        \
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)

#define BODY_CMPXCHG16B_TAIL(a_Type) \
        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
            IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()

#ifdef RT_ARCH_AMD64
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            {
                BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                BODY_CMPXCHG16B_TAIL(RW);
            }
            else
            {
                BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                BODY_CMPXCHG16B_TAIL(ATOMIC);
            }
        }
        else
        { /* (see comments in #else case below) */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
            {
                BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                BODY_CMPXCHG16B_TAIL(RW);
            }
            else
            {
                BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
                IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
                                    pEFlags, bUnmapInfo);
                IEM_MC_END();
            }
        }

#elif defined(RT_ARCH_ARM64)
        /** @todo may require fallback for unaligned accesses... */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            BODY_CMPXCHG16B_TAIL(RW);
        }
        else
        {
            BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            BODY_CMPXCHG16B_TAIL(ATOMIC);
        }

#else
        /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                 accesses and not at all atomic, which works fine in a uni-CPU guest
                 configuration (ignoring DMA).  If guest SMP is active we have no
                 choice but to use a rendezvous callback here.  Sigh. */
        if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
        {
            BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            BODY_CMPXCHG16B_TAIL(RW);
        }
        else
        {
            BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
            IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                iemCImpl_cmpxchg16b_fallback_rendezvous,
                                pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags, bUnmapInfo);
            IEM_MC_END();
            /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
        }
#endif

#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
    }
    Log(("cmpxchg16b -> #UD\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

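/** Opcode 0x0f 0xc7 !11/1 - REX.W selects between cmpxchg8b and cmpxchg16b. */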
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
{
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
    return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
}


/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
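        /* Note! rdrand may cause a VM-exit and updates RFLAGS (CF indicates
                 whether a random value was returned), hence the CIMPL flags. */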
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                            RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                            iemCImpl_rdrand, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/** Opcode 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif

/** Opcode 0x66 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif

/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                            RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                            iemCImpl_rdseed, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/**
 * Group 9 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_rdrand_Rv,          iemOp_Grp9_rdrand_Rv,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_rdseed_Rv,          iemOp_Grp9_rdseed_Rv,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);


/**
 * Group 9 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ iemOp_Grp9_cmpxchg8bOr16b,     iemOp_Grp9_cmpxchg8bOr16b,      iemOp_Grp9_cmpxchg8bOr16b,      iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_vmptrld_Mq,         iemOp_Grp9_vmclear_Mq,          iemOp_Grp9_vmxon_Mq,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_vmptrst_Mq,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);


/** Opcode 0x0f 0xc7. */
FNIEMOP_DEF(iemOp_Grp9)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
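    /* Note! The tables above are indexed by ModR/M.reg * 4 plus the
             operand-size/repeat prefix index (none, 066h, 0f3h, 0f2h). */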
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                             + pVCpu->iem.s.idxPrefix], bRm);
}


/**
 * Common 'bswap register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
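            /* Note! bswap with a 16-bit operand is undefined according to the
                     manuals; the 16-bit worker is assumed to mirror real
                     hardware here by clearing the low word. */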
            IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
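            /* 32-bit register writes zero-extend into the high half in 64-bit
               mode, hence the explicit clearing of the high dword below. */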
            IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! The Intel manuals state that R8-R15 can be accessed by using a
             REX.X prefix; it appears REX.B is actually the correct prefix.
             For a parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xca. */
FNIEMOP_DEF(iemOp_bswap_rDX_r10)
{
    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcb. */
FNIEMOP_DEF(iemOp_bswap_rBX_r11)
{
    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}


/* Opcode 0x0f 0xd0 - invalid */


/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}


/* Opcode 0xf3 0x0f 0xd0 - invalid */


/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}



/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
}

/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}

/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}


/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}


/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
}


/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}


/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */


/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
}


/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddq_u128);
}


/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
}

/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmullw_u128);
}


/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */

/**
 * @opcode 0xd6
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype none
 * @optest op1=-1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f3
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udf30fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f3
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    else
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f2
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
 * @optest op1=-42 op2=0xfedcba9876543210
 *         -> op1=0xfedcba9876543210 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udf20fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f2
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    else
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}


/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs say register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

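        /* The worker gathers the most significant bit of each source byte into
           the low bits of the destination GPR. */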
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs say register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */


/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
}


/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
}


/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
}


/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
}


/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
}


/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
}

/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
}


/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
}


/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusb_u128);
}


/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusw_u128);
}


/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
}


/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13328FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13329{
13330 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13331 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
13332}
13333
13334/* Opcode 0xf3 0x0f 0xde - invalid */
13335/* Opcode 0xf2 0x0f 0xde - invalid */
13336
13337
13338/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13339FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13340{
13341 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13342 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
13343}
13344
13345
13346/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13347FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13348{
13349 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13350 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
13351}
13352
13353
13354/* Opcode 0xf3 0x0f 0xdf - invalid */
13355/* Opcode 0xf2 0x0f 0xdf - invalid */
13356
13357/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13358FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13359{
13360 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13361 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13362}
13363
13364
13365/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13366FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13367{
13368 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13369 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13370}
13371
13372
13373/* Opcode 0xf3 0x0f 0xe0 - invalid */
13374/* Opcode 0xf2 0x0f 0xe0 - invalid */
13375
13376/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13377FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13378{
13379 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13380 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13381}
13382
13383
13384/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13385FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13386{
13387 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13388 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13389}
13390
13391
13392/* Opcode 0xf3 0x0f 0xe1 - invalid */
13393/* Opcode 0xf2 0x0f 0xe1 - invalid */
13394
13395/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13396FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13397{
13398 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13399 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13400}
13401
13402
13403/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13404FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13405{
13406 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13407 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13408}
13409
13410
13411/* Opcode 0xf3 0x0f 0xe2 - invalid */
13412/* Opcode 0xf2 0x0f 0xe2 - invalid */
13413
13414/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13415FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13416{
13417 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13418 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13419}
13420
13421
13422/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13423FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13424{
13425 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13426 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13427}
13428
13429
13430/* Opcode 0xf3 0x0f 0xe3 - invalid */
13431/* Opcode 0xf2 0x0f 0xe3 - invalid */
13432
13433/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13434FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13435{
13436 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13437 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13438}
13439
13440
13441/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13442FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13443{
13444 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13445 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13446}
13447
13448
13449/* Opcode 0xf3 0x0f 0xe4 - invalid */
13450/* Opcode 0xf2 0x0f 0xe4 - invalid */
13451
13452/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13453FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13454{
13455 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13456 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
13457}
13458
13459
13460/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13461FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13462{
13463 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13464 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
13465}
13466
13467
13468/* Opcode 0xf3 0x0f 0xe5 - invalid */
13469/* Opcode 0xf2 0x0f 0xe5 - invalid */
13470/* Opcode 0x0f 0xe6 - invalid */
13471
13472
13473/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13474FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13475{
13476 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13477 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13478}
13479
13480
13481/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13482FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13483{
13484 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13485 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13486}
13487
13488
13489/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13490FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13491{
13492 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13493 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13494}
13495
13496
13497/**
13498 * @opcode 0xe7
13499 * @opcodesub !11 mr/reg
13500 * @oppfx none
13501 * @opcpuid sse
13502 * @opgroup og_sse1_cachect
13503 * @opxcpttype none
13504 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13505 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13506 */
13507FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13508{
13509 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13511 if (IEM_IS_MODRM_MEM_MODE(bRm))
13512 {
13513 /* Register, memory. */
13514 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13515 IEM_MC_LOCAL(uint64_t, uSrc);
13516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13517
13518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13520 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13521 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13522 IEM_MC_FPU_TO_MMX_MODE();
13523
13524 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13525 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13526
13527 IEM_MC_ADVANCE_RIP_AND_FINISH();
13528 IEM_MC_END();
13529 }
13530 /**
13531 * @opdone
13532 * @opmnemonic ud0fe7reg
13533 * @opcode 0xe7
13534 * @opcodesub 11 mr/reg
13535 * @oppfx none
13536 * @opunused immediate
13537 * @opcpuid sse
13538 * @optest ->
13539 */
13540 else
13541 IEMOP_RAISE_INVALID_OPCODE_RET();
13542}
13543
13544/**
13545 * @opcode 0xe7
13546 * @opcodesub !11 mr/reg
13547 * @oppfx 0x66
13548 * @opcpuid sse2
13549 * @opgroup og_sse2_cachect
13550 * @opxcpttype 1
13551 * @optest op1=-1 op2=2 -> op1=2
13552 * @optest op1=0 op2=-42 -> op1=-42
13553 */
13554FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13555{
13556 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13558 if (IEM_IS_MODRM_MEM_MODE(bRm))
13559 {
13560 /* Register, memory. */
13561 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13562 IEM_MC_LOCAL(RTUINT128U, uSrc);
13563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13564
13565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13567 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13568 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13569
13570 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13571 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13572
13573 IEM_MC_ADVANCE_RIP_AND_FINISH();
13574 IEM_MC_END();
13575 }
13576
13577 /**
13578 * @opdone
13579 * @opmnemonic ud660fe7reg
13580 * @opcode 0xe7
13581 * @opcodesub 11 mr/reg
13582 * @oppfx 0x66
13583 * @opunused immediate
13584 * @opcpuid sse
13585 * @optest ->
13586 */
13587 else
13588 IEMOP_RAISE_INVALID_OPCODE_RET();
13589}
13590
13591/* Opcode 0xf3 0x0f 0xe7 - invalid */
13592/* Opcode 0xf2 0x0f 0xe7 - invalid */
13593
13594
13595/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13596FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13597{
13598 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13599 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
13600}
13601
13602
13603/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13604FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13605{
13606 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13607 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
13608}
13609
13610
13611/* Opcode 0xf3 0x0f 0xe8 - invalid */
13612/* Opcode 0xf2 0x0f 0xe8 - invalid */
13613
13614/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13615FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13616{
13617 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13618 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
13619}
13620
13621
13622/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13623FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13624{
13625 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13626 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
13627}
13628
13629
13630/* Opcode 0xf3 0x0f 0xe9 - invalid */
13631/* Opcode 0xf2 0x0f 0xe9 - invalid */
13632
13633
13634/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13635FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13636{
13637 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13638 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
13639}
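/* Note: pminsw on MMX registers was added with the SSE/MMX extensions rather
   than with base MMX, so this goes through the MmxSse variant of the common
   worker instead of the plain Mmx one used by the base MMX instructions. */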
13640
13641
13642/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13643FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13644{
13645 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13646 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
13647}
13648
13649
13650/* Opcode 0xf3 0x0f 0xea - invalid */
13651/* Opcode 0xf2 0x0f 0xea - invalid */
13652
13653
13654/** Opcode 0x0f 0xeb - por Pq, Qq */
13655FNIEMOP_DEF(iemOp_por_Pq_Qq)
13656{
13657 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13658 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
13659}
13660
13661
13662/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13663FNIEMOP_DEF(iemOp_por_Vx_Wx)
13664{
13665 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13666 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
13667}
13668
13669
13670/* Opcode 0xf3 0x0f 0xeb - invalid */
13671/* Opcode 0xf2 0x0f 0xeb - invalid */
13672
13673/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13674FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13675{
13676 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13677 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
13678}
13679
13680
13681/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13682FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13683{
13684 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13685 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
13686}
13687
13688
13689/* Opcode 0xf3 0x0f 0xec - invalid */
13690/* Opcode 0xf2 0x0f 0xec - invalid */
13691
13692/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13693FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13694{
13695 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13696 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
13697}
13698
13699
13700/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13701FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13702{
13703 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13704 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
13705}
13706
13707
13708/* Opcode 0xf3 0x0f 0xed - invalid */
13709/* Opcode 0xf2 0x0f 0xed - invalid */
13710
13711
13712/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13713FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13714{
13715 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13716 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13717}
13718
13719
13720/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13721FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13722{
13723 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13724 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13725}
13726
13727
13728/* Opcode 0xf3 0x0f 0xee - invalid */
13729/* Opcode 0xf2 0x0f 0xee - invalid */
13730
13731
13732/** Opcode 0x0f 0xef - pxor Pq, Qq */
13733FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13734{
13735 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13736 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
13737}
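/* For reference, the iemAImpl_*_u64 workers passed to
   iemOpCommonMmxOpt_FullFull_To_Full take no FXSAVE state, just the two
   operands; the shipped implementations live in IEMAllAImpl.asm (with C
   fallbacks in IEMAllAImplC.cpp). A minimal sketch of what the pxor worker
   boils down to, ignoring the real calling-convention macros:

       static void iemAImpl_pxor_u64_sketch(uint64_t *puDst, uint64_t const *puSrc)
       {
           *puDst ^= *puSrc;  // bitwise XOR of the two 64-bit MMX operands
       }
*/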
13738
13739
13740/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13741FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13742{
13743 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13744 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
13745}
13746
13747
13748/* Opcode 0xf3 0x0f 0xef - invalid */
13749/* Opcode 0xf2 0x0f 0xef - invalid */
13750
13751/* Opcode 0x0f 0xf0 - invalid */
13752/* Opcode 0x66 0x0f 0xf0 - invalid */
13753
13754
13755/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13756FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13757{
13758 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13760 if (IEM_IS_MODRM_REG_MODE(bRm))
13761 {
13762 /*
13763 * Register, register - (not implemented, assuming it raises \#UD).
13764 */
13765 IEMOP_RAISE_INVALID_OPCODE_RET();
13766 }
13767 else
13768 {
13769 /*
13770 * Register, memory.
13771 */
13772 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13773 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13775
13776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13778 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13779 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13780 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13781 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13782
13783 IEM_MC_ADVANCE_RIP_AND_FINISH();
13784 IEM_MC_END();
13785 }
13786}
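/* Note: LDDQU (SSE3) is specified to accept unaligned 16-byte operands, hence
   the IEM_MC_FETCH_MEM_U128_NO_AC fetch above, in contrast to the
   alignment-checking movntdq/movdqa paths. */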
13787
13788
13789/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13790FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13791{
13792 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13793 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13794}
13795
13796
13797/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13798FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13799{
13800 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13801 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13802}
13803
13804
13805/* Opcode 0xf2 0x0f 0xf1 - invalid */
13806
13807/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13808FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13809{
13810 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13811 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13812}
13813
13814
13815/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13816FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13817{
13818 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13819 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13820}
13821
13822
13823/* Opcode 0xf2 0x0f 0xf2 - invalid */
13824
13825/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13826FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13827{
13828 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13829 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13830}
13831
13832
13833/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13834FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13835{
13836 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13837 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13838}
13839
13840/* Opcode 0xf2 0x0f 0xf3 - invalid */
13841
13842/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13843FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13844{
13845 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13846 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_pmuludq_u64);
13847}
13848
13849
13850/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13851FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13852{
13853 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13854 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
13855}
13856
13857
13858/* Opcode 0xf2 0x0f 0xf4 - invalid */
13859
13860/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13861FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13862{
13863 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13864 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13865}
13866
13867
13868/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13869FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13870{
13871 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13872 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13873}
13874
13875/* Opcode 0xf2 0x0f 0xf5 - invalid */
13876
13877/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13878FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13879{
13880 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13881 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13882}
13883
13884
13885/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13886FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13887{
13888 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13889 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13890}
13891
13892
13893/* Opcode 0xf2 0x0f 0xf6 - invalid */
13894
13895/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13896FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13897/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13898FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13899/* Opcode 0xf2 0x0f 0xf7 - invalid */
13900
13901
13902/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13903FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13904{
13905 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13906 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
13907}
13908
13909
13910/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13911FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13912{
13913 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13914 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubb_u128);
13915}
13916
13917
13918/* Opcode 0xf2 0x0f 0xf8 - invalid */
13919
13920
13921/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13922FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13923{
13924 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13925 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
13926}
13927
13928
13929/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13930FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13931{
13932 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13933 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubw_u128);
13934}
13935
13936
13937/* Opcode 0xf2 0x0f 0xf9 - invalid */
13938
13939
13940/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13941FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13942{
13943 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13944 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
13945}
13946
13947
13948/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13949FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13950{
13951 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13952 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubd_u128);
13953}
13954
13955
13956/* Opcode 0xf2 0x0f 0xfa - invalid */
13957
13958
13959/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13960FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13961{
13962 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13963 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13964}
13965
13966
13967/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13968FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13969{
13970 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13971 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubq_u128);
13972}
13973
13974
13975/* Opcode 0xf2 0x0f 0xfb - invalid */
13976
13977
13978/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13979FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13980{
13981 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13982 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
13983}
13984
13985
13986/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13987FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13988{
13989 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13990 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddb_u128);
13991}
13992
13993
13994/* Opcode 0xf2 0x0f 0xfc - invalid */
13995
13996
13997/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13998FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13999{
14000 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14001 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
14002}
14003
14004
14005/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
14006FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
14007{
14008 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14009 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddw_u128);
14010}
14011
14012
14013/* Opcode 0xf2 0x0f 0xfd - invalid */
14014
14015
14016/** Opcode 0x0f 0xfe - paddd Pq, Qq */
14017FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
14018{
14019 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14020 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
14021}
14022
14023
14024/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
14025FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
14026{
14027 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14028 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddd_u128);
14029}
14030
14031
14032/* Opcode 0xf2 0x0f 0xfe - invalid */
14033
14034
14035/** Opcode **** 0x0f 0xff - UD0 */
14036FNIEMOP_DEF(iemOp_ud0)
14037{
14038 IEMOP_MNEMONIC(ud0, "ud0");
14039 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
14040 {
14041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
14042 if (IEM_IS_MODRM_MEM_MODE(bRm))
14043 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
14044 }
14045 IEMOP_HLP_DONE_DECODING();
14046 IEMOP_RAISE_INVALID_OPCODE_RET();
14047}
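/* Note: Intel documents UD0 with a ModR/M byte, so Intel CPUs consume it and
   any effective-address bytes before raising \#UD, while other vendors fault
   right after the opcode; the vendor check above reproduces that difference
   in instruction length. */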
14048
14049
14050
14051/**
14052 * Two byte opcode map, first byte 0x0f.
14053 *
14054 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14055 * check if it needs updating as well when making changes.
14056 */
14057const PFNIEMOP g_apfnTwoByteMap[] =
14058{
14059 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
14060 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14061 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14062 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14063 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14064 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14065 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14066 /* 0x06 */ IEMOP_X4(iemOp_clts),
14067 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14068 /* 0x08 */ IEMOP_X4(iemOp_invd),
14069 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14070 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14071 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14072 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14073 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14074 /* 0x0e */ IEMOP_X4(iemOp_femms),
14075 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14076
14077 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14078 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14079 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14080 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14081 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14082 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14083 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14084 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14085 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14086 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14087 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14088 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14089 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14090 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14091 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14092 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14093
14094 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14095 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14096 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14097 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14098 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14099 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14100 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14101 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14102 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14103 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14104 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14105 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14106 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14107 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14108 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14109 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14110
14111 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14112 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14113 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14114 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14115 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14116 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14117 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14118 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14119 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14120 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14121 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14122 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14123 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14124 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14125 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14126 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14127
14128 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14129 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14130 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14131 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14132 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14133 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14134 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14135 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14136 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14137 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14138 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14139 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14140 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14141 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14142 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14143 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14144
14145 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14146 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14147 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14148 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14149 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14150 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14151 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14152 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14153 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14154 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14155 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14156 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14157 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14158 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14159 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14160 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14161
14162 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14163 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14164 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14165 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14166 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14167 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14168 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14169 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14170 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14171 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14172 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14173 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14174 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14175 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14176 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14177 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14178
14179 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14180 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14181 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14182 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14183 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14184 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14185 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14186 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14187
14188 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14189 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14190 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14191 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14192 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14193 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14194 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14195 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14196
14197 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14198 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14199 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14200 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14201 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14202 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14203 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14204 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14205 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14206 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14207 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14208 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14209 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14210 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14211 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14212 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14213
14214 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14215 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14216 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14217 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14218 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14219 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14220 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14221 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14222 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14223 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14224 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14225 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14226 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14227 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14228 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14229 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14230
14231 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14232 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14233 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14234 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14235 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14236 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14237 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14238 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14239 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14240 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14241 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14242 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14243 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14244 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14245 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14246 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14247
14248 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14249 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14250 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14251 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14252 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14253 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14254 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14255 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14256 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14257 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14258 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14259 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14260 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14261 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14262 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14263 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14264
14265 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14266 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14267 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14268 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14269 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14270 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14271 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14272 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14273 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14274 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14275 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14276 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14277 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14278 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14279 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14280 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14281
14282 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14283 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14284 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14285 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14286 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14287 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14288 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14289 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14290 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14291 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14292 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14293 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14294 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14295 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14296 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14297 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14298
14299 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14300 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14301 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14302 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14303 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14304 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14305 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14306 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14307 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14308 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14309 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14312 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14313 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14314 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14315
14316 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14317 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14318 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14319 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14320 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14321 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14322 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14323 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14324 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14325 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14326 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14327 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14328 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14329 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14330 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14331 /* 0xff */ IEMOP_X4(iemOp_ud0),
14332};
14333AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
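/* The map holds four entries per opcode byte (256 * 4 = 1024): one column for
   each of the no-prefix, 0x66, 0xF3 and 0xF2 forms, with IEMOP_X4 simply
   replicating a single handler across all four columns. A sketch of the
   lookup, assuming the decoder keeps the active prefix as a column index 0..3:

       pfnHandler = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefixColumn];
*/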
14334
14335/** @} */
14336