VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@104156

Last change on this file since 104156 was 104156, checked in by vboxsync, 11 months ago

VMM/IEM: Rework microcode of cmpss/cmpsd/cmpps/cmppd/roundss/roundsd/roundps/roundpd/dpps/dppd to use IEM_MC_CALL_SSE_AIMPL_3 and don't reference the MXCSR explicitly, bugref:10641

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 513.3 KB
/* $Id: IEMAllInstTwoByte0f.cpp.h 104156 2024-04-04 12:05:54Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
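    /* ModR/M mod == 3 selects the register,register form; otherwise the source operand is in memory. */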
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
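        /* Using the MMX registers switches the x87 unit to MMX mode (all tags valid, TOS cleared). */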
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
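        /* Raises #UD/#NM if SSE cannot be used in the current CPU state (CR0.EM/TS, CR4.OSFXSR). */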
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
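        /* The aligned fetch raises #GP(0) when the effective address is not 16-byte aligned. */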
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx    mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
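        /* Only a dword is read from memory here; it is zero-extended into the 64-bit local. */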
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx    xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx    xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx    mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx    xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
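        /* The SSE helper call manages MXCSR itself; the microcode no longer references it directly (see the rework note at the top of this page). */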
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
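        /* Raises #XM (or #UD if CR4.OSXMMEXCPT is clear) when the helper left unmasked SIMD FP exceptions pending. */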
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx    xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/* Need to associate flag info with the blocks, so duplicate the code. */
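/* Note: the trailing (void)0 in the macro below forces a terminating semicolon at each expansion site. */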
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps grp6
 * @opcode /4
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps grp6
 * @opcode /5
 * @opflmodify zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
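    /* The group-6 instruction is selected by the ModR/M reg field (bits 5:3). */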
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored; everything is 16-bit and only
       the lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

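    /* Register forms encode further instructions in the r/m field (e.g. 0F 01 D0 is xgetbv). */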
1625 switch (IEM_GET_MODRM_REG_8(bRm))
1626 {
1627 case 0:
1628 switch (IEM_GET_MODRM_RM_8(bRm))
1629 {
1630 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1631 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1632 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1633 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1634 }
1635 IEMOP_RAISE_INVALID_OPCODE_RET();
1636
1637 case 1:
1638 switch (IEM_GET_MODRM_RM_8(bRm))
1639 {
1640 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1641 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1642 }
1643 IEMOP_RAISE_INVALID_OPCODE_RET();
1644
1645 case 2:
1646 switch (IEM_GET_MODRM_RM_8(bRm))
1647 {
1648 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1649 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1650 }
1651 IEMOP_RAISE_INVALID_OPCODE_RET();
1652
1653 case 3:
1654 switch (IEM_GET_MODRM_RM_8(bRm))
1655 {
1656 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1657 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1658 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1659 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1660 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1661 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1662 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1663 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1665 }
1666
1667 case 4:
1668 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1669
1670 case 5:
1671 IEMOP_RAISE_INVALID_OPCODE_RET();
1672
1673 case 6:
1674 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1675
1676 case 7:
1677 switch (IEM_GET_MODRM_RM_8(bRm))
1678 {
1679 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1680 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1681 }
1682 IEMOP_RAISE_INVALID_OPCODE_RET();
1683
1684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1685 }
1686}
1687
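/**
 * Common worker for LAR (0x0f 0x02, loads access rights) and
 * LSL (0x0f 0x03, loads the segment limit): lar/lsl Gv, Ew.
 *
 * @param   fIsLar      Set for LAR, clear for LSL.
 */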
1688FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1689{
1690 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1692
1693 if (IEM_IS_MODRM_REG_MODE(bRm))
1694 {
1695 switch (pVCpu->iem.s.enmEffOpSize)
1696 {
1697 case IEMMODE_16BIT:
1698 IEM_MC_BEGIN(0, 0);
1699 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1700 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1701 IEM_MC_ARG(uint16_t, u16Sel, 1);
1702 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1703
1704 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1705 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1706 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1707 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1708
1709 IEM_MC_END();
1710 break;
1711
1712 case IEMMODE_32BIT:
1713 case IEMMODE_64BIT:
1714 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1715 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1716 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1717 IEM_MC_ARG(uint16_t, u16Sel, 1);
1718 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1719
1720 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1721 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1722 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1723 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1724
1725 IEM_MC_END();
1726 break;
1727
1728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1729 }
1730 }
1731 else
1732 {
1733 switch (pVCpu->iem.s.enmEffOpSize)
1734 {
1735 case IEMMODE_16BIT:
1736 IEM_MC_BEGIN(0, 0);
1737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1738 IEM_MC_ARG(uint16_t, u16Sel, 1);
1739 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1741
1742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1743 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1744
1745 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1746 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1747 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1748 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1749
1750 IEM_MC_END();
1751 break;
1752
1753 case IEMMODE_32BIT:
1754 case IEMMODE_64BIT:
1755 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1756 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1757 IEM_MC_ARG(uint16_t, u16Sel, 1);
1758 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1760
1761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1762 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1763/** @todo testcase: make sure it's a 16-bit read. */
1764
1765 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1766 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1767 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1768 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1769
1770 IEM_MC_END();
1771 break;
1772
1773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1774 }
1775 }
1776}
1777
1778
1779
1780/**
1781 * @opcode 0x02
1782 * @opflmodify zf
1783 */
1784FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1785{
1786 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1787 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1788}
1789
1790
1791/**
1792 * @opcode 0x03
1793 * @opflmodify zf
1794 */
1795FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1796{
1797 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1798 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1799}
1800
1801
1802/** Opcode 0x0f 0x05. */
1803FNIEMOP_DEF(iemOp_syscall)
1804{
1805 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1807 /** @todo r=aeichner Clobbers cr0 only if this is a 286 LOADALL instruction. */
1808 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1809 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1810 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_syscall);
1811}
1812
1813
1814/** Opcode 0x0f 0x06. */
1815FNIEMOP_DEF(iemOp_clts)
1816{
1817 IEMOP_MNEMONIC(clts, "clts");
1818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1819 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1820}
1821
1822
1823/** Opcode 0x0f 0x07. */
1824FNIEMOP_DEF(iemOp_sysret)
1825{
1826 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1828 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1829 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1830 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1831}
1832
1833
1834/** Opcode 0x0f 0x08. */
1835FNIEMOP_DEF(iemOp_invd)
1836{
1837 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1838 IEMOP_HLP_MIN_486();
1839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1840 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1841}
1842
1843
1844/** Opcode 0x0f 0x09. */
1845FNIEMOP_DEF(iemOp_wbinvd)
1846{
1847 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1848 IEMOP_HLP_MIN_486();
1849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1850 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1851}
1852
1853
1854/** Opcode 0x0f 0x0b. */
1855FNIEMOP_DEF(iemOp_ud2)
1856{
1857 IEMOP_MNEMONIC(ud2, "ud2");
1858 IEMOP_RAISE_INVALID_OPCODE_RET();
1859}
1860
1861/** Opcode 0x0f 0x0d. */
1862FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1863{
1864 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1865 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1866 {
1867 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1868 IEMOP_RAISE_INVALID_OPCODE_RET();
1869 }
1870
1871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1872 if (IEM_IS_MODRM_REG_MODE(bRm))
1873 {
1874 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1875 IEMOP_RAISE_INVALID_OPCODE_RET();
1876 }
1877
1878 switch (IEM_GET_MODRM_REG_8(bRm))
1879 {
1880 case 2: /* Aliased to /0 for the time being. */
1881 case 4: /* Aliased to /0 for the time being. */
1882 case 5: /* Aliased to /0 for the time being. */
1883 case 6: /* Aliased to /0 for the time being. */
1884 case 7: /* Aliased to /0 for the time being. */
1885 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1886 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1887 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1889 }
1890
1891 IEM_MC_BEGIN(0, 0);
1892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1895 /* Currently a NOP. */
1896 IEM_MC_NOREF(GCPtrEffSrc);
1897 IEM_MC_ADVANCE_RIP_AND_FINISH();
1898 IEM_MC_END();
1899}
1900
1901
1902/** Opcode 0x0f 0x0e. */
1903FNIEMOP_DEF(iemOp_femms)
1904{
1905 IEMOP_MNEMONIC(femms, "femms");
1906
1907 IEM_MC_BEGIN(0, 0);
1908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(f3DNow);
1909 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
1910 IEM_MC_MAYBE_RAISE_FPU_XCPT();
1911 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
1912 IEM_MC_FPU_FROM_MMX_MODE();
1913 IEM_MC_ADVANCE_RIP_AND_FINISH();
1914 IEM_MC_END();
1915}
1916
1917
1918/** Opcode 0x0f 0x0f. */
1919FNIEMOP_DEF(iemOp_3Dnow)
1920{
1921 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1922 {
1923 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1924 IEMOP_RAISE_INVALID_OPCODE_RET();
1925 }
1926
1927#ifdef IEM_WITH_3DNOW
1928 /* This is pretty sparse, use switch instead of table. */
1929 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1930 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
1931#else
1932 IEMOP_BITCH_ABOUT_STUB();
1933 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1934#endif
1935}
1936
1937
1938/**
1939 * @opcode 0x10
1940 * @oppfx none
1941 * @opcpuid sse
1942 * @opgroup og_sse_simdfp_datamove
1943 * @opxcpttype 4UA
1944 * @optest op1=1 op2=2 -> op1=2
1945 * @optest op1=0 op2=-22 -> op1=-22
1946 */
1947FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1948{
1949 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1951 if (IEM_IS_MODRM_REG_MODE(bRm))
1952 {
1953 /*
1954 * XMM128, XMM128.
1955 */
1956 IEM_MC_BEGIN(0, 0);
1957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
1958 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1959 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1960 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
1961 IEM_GET_MODRM_RM(pVCpu, bRm));
1962 IEM_MC_ADVANCE_RIP_AND_FINISH();
1963 IEM_MC_END();
1964 }
1965 else
1966 {
1967 /*
1968 * XMM128, [mem128].
1969 */
1970 IEM_MC_BEGIN(0, 0);
1971 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1973
1974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
1976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1977 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1978
1979 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1980 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1981
1982 IEM_MC_ADVANCE_RIP_AND_FINISH();
1983 IEM_MC_END();
1984 }
1985
1986}
1987
1988
1989/**
1990 * @opcode 0x10
1991 * @oppfx 0x66
1992 * @opcpuid sse2
1993 * @opgroup og_sse2_pcksclr_datamove
1994 * @opxcpttype 4UA
1995 * @optest op1=1 op2=2 -> op1=2
1996 * @optest op1=0 op2=-42 -> op1=-42
1997 */
1998FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1999{
2000 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2002 if (IEM_IS_MODRM_REG_MODE(bRm))
2003 {
2004 /*
2005 * XMM128, XMM128.
2006 */
2007 IEM_MC_BEGIN(0, 0);
2008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2009 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2010 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2011 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2012 IEM_GET_MODRM_RM(pVCpu, bRm));
2013 IEM_MC_ADVANCE_RIP_AND_FINISH();
2014 IEM_MC_END();
2015 }
2016 else
2017 {
2018 /*
2019 * XMM128, [mem128].
2020 */
2021 IEM_MC_BEGIN(0, 0);
2022 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2024
2025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2027 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2028 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2029
2030 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2031 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2032
2033 IEM_MC_ADVANCE_RIP_AND_FINISH();
2034 IEM_MC_END();
2035 }
2036}
2037
2038
2039/**
2040 * @opcode 0x10
2041 * @oppfx 0xf3
2042 * @opcpuid sse
2043 * @opgroup og_sse_simdfp_datamove
2044 * @opxcpttype 5
2045 * @optest op1=1 op2=2 -> op1=2
2046 * @optest op1=0 op2=-22 -> op1=-22
2047 */
2048FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2049{
2050 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2052 if (IEM_IS_MODRM_REG_MODE(bRm))
2053 {
2054 /*
2055 * XMM32, XMM32.
2056 */
2057 IEM_MC_BEGIN(0, 0);
2058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2059 IEM_MC_LOCAL(uint32_t, uSrc);
2060
2061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2062 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2063 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2064 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2065
2066 IEM_MC_ADVANCE_RIP_AND_FINISH();
2067 IEM_MC_END();
2068 }
2069 else
2070 {
2071 /*
2072 * XMM128, [mem32].
2073 */
2074 IEM_MC_BEGIN(0, 0);
2075 IEM_MC_LOCAL(uint32_t, uSrc);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077
2078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2080 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2081 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2082
2083 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2084 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2085
2086 IEM_MC_ADVANCE_RIP_AND_FINISH();
2087 IEM_MC_END();
2088 }
2089}
2090
2091
2092/**
2093 * @opcode 0x10
2094 * @oppfx 0xf2
2095 * @opcpuid sse2
2096 * @opgroup og_sse2_pcksclr_datamove
2097 * @opxcpttype 5
2098 * @optest op1=1 op2=2 -> op1=2
2099 * @optest op1=0 op2=-42 -> op1=-42
2100 */
2101FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2102{
2103 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2105 if (IEM_IS_MODRM_REG_MODE(bRm))
2106 {
2107 /*
2108 * XMM64, XMM64.
2109 */
2110 IEM_MC_BEGIN(0, 0);
2111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2112 IEM_MC_LOCAL(uint64_t, uSrc);
2113
2114 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2115 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2116 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2117 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2118
2119 IEM_MC_ADVANCE_RIP_AND_FINISH();
2120 IEM_MC_END();
2121 }
2122 else
2123 {
2124 /*
2125 * XMM128, [mem64].
2126 */
2127 IEM_MC_BEGIN(0, 0);
2128 IEM_MC_LOCAL(uint64_t, uSrc);
2129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2130
2131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2135
2136 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2137 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2138
2139 IEM_MC_ADVANCE_RIP_AND_FINISH();
2140 IEM_MC_END();
2141 }
2142}
2143
2144
2145/**
2146 * @opcode 0x11
2147 * @oppfx none
2148 * @opcpuid sse
2149 * @opgroup og_sse_simdfp_datamove
2150 * @opxcpttype 4UA
2151 * @optest op1=1 op2=2 -> op1=2
2152 * @optest op1=0 op2=-42 -> op1=-42
2153 */
2154FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2155{
2156 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2158 if (IEM_IS_MODRM_REG_MODE(bRm))
2159 {
2160 /*
2161 * XMM128, XMM128.
2162 */
2163 IEM_MC_BEGIN(0, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2165 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2166 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2167 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2168 IEM_GET_MODRM_REG(pVCpu, bRm));
2169 IEM_MC_ADVANCE_RIP_AND_FINISH();
2170 IEM_MC_END();
2171 }
2172 else
2173 {
2174 /*
2175 * [mem128], XMM128.
2176 */
2177 IEM_MC_BEGIN(0, 0);
2178 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2180
2181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2183 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2185
2186 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2187 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2188
2189 IEM_MC_ADVANCE_RIP_AND_FINISH();
2190 IEM_MC_END();
2191 }
2192}
2193
2194
2195/**
2196 * @opcode 0x11
2197 * @oppfx 0x66
2198 * @opcpuid sse2
2199 * @opgroup og_sse2_pcksclr_datamove
2200 * @opxcpttype 4UA
2201 * @optest op1=1 op2=2 -> op1=2
2202 * @optest op1=0 op2=-42 -> op1=-42
2203 */
2204FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2205{
2206 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2208 if (IEM_IS_MODRM_REG_MODE(bRm))
2209 {
2210 /*
2211 * XMM128, XMM128.
2212 */
2213 IEM_MC_BEGIN(0, 0);
2214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2215 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2216 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2217 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2218 IEM_GET_MODRM_REG(pVCpu, bRm));
2219 IEM_MC_ADVANCE_RIP_AND_FINISH();
2220 IEM_MC_END();
2221 }
2222 else
2223 {
2224 /*
2225 * [mem128], XMM128.
2226 */
2227 IEM_MC_BEGIN(0, 0);
2228 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2230
2231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2233 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2234 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2235
2236 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2237 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2238
2239 IEM_MC_ADVANCE_RIP_AND_FINISH();
2240 IEM_MC_END();
2241 }
2242}
2243
2244
2245/**
2246 * @opcode 0x11
2247 * @oppfx 0xf3
2248 * @opcpuid sse
2249 * @opgroup og_sse_simdfp_datamove
2250 * @opxcpttype 5
2251 * @optest op1=1 op2=2 -> op1=2
2252 * @optest op1=0 op2=-22 -> op1=-22
2253 */
2254FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2255{
2256 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2258 if (IEM_IS_MODRM_REG_MODE(bRm))
2259 {
2260 /*
2261 * XMM32, XMM32.
2262 */
2263 IEM_MC_BEGIN(0, 0);
2264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2265 IEM_MC_LOCAL(uint32_t, uSrc);
2266
2267 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2268 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2269 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2270 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2271
2272 IEM_MC_ADVANCE_RIP_AND_FINISH();
2273 IEM_MC_END();
2274 }
2275 else
2276 {
2277 /*
2278 * [mem32], XMM32.
2279 */
2280 IEM_MC_BEGIN(0, 0);
2281 IEM_MC_LOCAL(uint32_t, uSrc);
2282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2283
2284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2286 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2287 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2288
2289 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2290 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2291
2292 IEM_MC_ADVANCE_RIP_AND_FINISH();
2293 IEM_MC_END();
2294 }
2295}
2296
2297
2298/**
2299 * @opcode 0x11
2300 * @oppfx 0xf2
2301 * @opcpuid sse2
2302 * @opgroup og_sse2_pcksclr_datamove
2303 * @opxcpttype 5
2304 * @optest op1=1 op2=2 -> op1=2
2305 * @optest op1=0 op2=-42 -> op1=-42
2306 */
2307FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2308{
2309 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2311 if (IEM_IS_MODRM_REG_MODE(bRm))
2312 {
2313 /*
2314 * XMM64, XMM64.
2315 */
2316 IEM_MC_BEGIN(0, 0);
2317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2318 IEM_MC_LOCAL(uint64_t, uSrc);
2319
2320 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2321 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2322 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2323 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2324
2325 IEM_MC_ADVANCE_RIP_AND_FINISH();
2326 IEM_MC_END();
2327 }
2328 else
2329 {
2330 /*
2331 * [mem64], XMM64.
2332 */
2333 IEM_MC_BEGIN(0, 0);
2334 IEM_MC_LOCAL(uint64_t, uSrc);
2335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2336
2337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2340 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2341
2342 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2343 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2344
2345 IEM_MC_ADVANCE_RIP_AND_FINISH();
2346 IEM_MC_END();
2347 }
2348}
2349
2350
2351FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2352{
2353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2354 if (IEM_IS_MODRM_REG_MODE(bRm))
2355 {
2356 /**
2357 * @opcode 0x12
2358 * @opcodesub 11 mr/reg
2359 * @oppfx none
2360 * @opcpuid sse
2361 * @opgroup og_sse_simdfp_datamove
2362 * @opxcpttype 5
2363 * @optest op1=1 op2=2 -> op1=2
2364 * @optest op1=0 op2=-42 -> op1=-42
2365 */
2366 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2367
2368 IEM_MC_BEGIN(0, 0);
2369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2370 IEM_MC_LOCAL(uint64_t, uSrc);
2371
2372 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2373 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
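 /* Copy the high quadword of the source to the low quadword of the destination;
    the destination's high quadword is preserved. */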
2374 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2375 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2376
2377 IEM_MC_ADVANCE_RIP_AND_FINISH();
2378 IEM_MC_END();
2379 }
2380 else
2381 {
2382 /**
2383 * @opdone
2384 * @opcode 0x12
2385 * @opcodesub !11 mr/reg
2386 * @oppfx none
2387 * @opcpuid sse
2388 * @opgroup og_sse_simdfp_datamove
2389 * @opxcpttype 5
2390 * @optest op1=1 op2=2 -> op1=2
2391 * @optest op1=0 op2=-42 -> op1=-42
2392 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2393 */
2394 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2395
2396 IEM_MC_BEGIN(0, 0);
2397 IEM_MC_LOCAL(uint64_t, uSrc);
2398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2399
2400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2402 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2403 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2404
2405 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2406 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2407
2408 IEM_MC_ADVANCE_RIP_AND_FINISH();
2409 IEM_MC_END();
2410 }
2411}
2412
2413
2414/**
2415 * @opcode 0x12
2416 * @opcodesub !11 mr/reg
2417 * @oppfx 0x66
2418 * @opcpuid sse2
2419 * @opgroup og_sse2_pcksclr_datamove
2420 * @opxcpttype 5
2421 * @optest op1=1 op2=2 -> op1=2
2422 * @optest op1=0 op2=-42 -> op1=-42
2423 */
2424FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2425{
2426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2427 if (IEM_IS_MODRM_MEM_MODE(bRm))
2428 {
2429 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2430
2431 IEM_MC_BEGIN(0, 0);
2432 IEM_MC_LOCAL(uint64_t, uSrc);
2433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2434
2435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2437 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2439
2440 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2441 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2442
2443 IEM_MC_ADVANCE_RIP_AND_FINISH();
2444 IEM_MC_END();
2445 }
2446
2447 /**
2448 * @opdone
2449 * @opmnemonic ud660f12m3
2450 * @opcode 0x12
2451 * @opcodesub 11 mr/reg
2452 * @oppfx 0x66
2453 * @opunused immediate
2454 * @opcpuid sse
2455 * @optest ->
2456 */
2457 else
2458 IEMOP_RAISE_INVALID_OPCODE_RET();
2459}
2460
2461
2462/**
2463 * @opcode 0x12
2464 * @oppfx 0xf3
2465 * @opcpuid sse3
2466 * @opgroup og_sse3_pcksclr_datamove
2467 * @opxcpttype 4
2468 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2469 * op1=0x00000002000000020000000100000001
2470 */
2471FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2472{
2473 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2475 if (IEM_IS_MODRM_REG_MODE(bRm))
2476 {
2477 /*
2478 * XMM, XMM.
2479 */
2480 IEM_MC_BEGIN(0, 0);
2481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2482 IEM_MC_LOCAL(RTUINT128U, uSrc);
2483
2484 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2485 IEM_MC_PREPARE_SSE_USAGE();
2486
2487 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
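 /* Duplicate the even dwords: dst[0..3] = src[0], src[0], src[2], src[2]. */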
2488 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2489 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2490 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2491 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2492
2493 IEM_MC_ADVANCE_RIP_AND_FINISH();
2494 IEM_MC_END();
2495 }
2496 else
2497 {
2498 /*
2499 * XMM, [mem128].
2500 */
2501 IEM_MC_BEGIN(0, 0);
2502 IEM_MC_LOCAL(RTUINT128U, uSrc);
2503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2504
2505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2507 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2508 IEM_MC_PREPARE_SSE_USAGE();
2509
2510 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2511 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2512 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2513 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2514 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2515
2516 IEM_MC_ADVANCE_RIP_AND_FINISH();
2517 IEM_MC_END();
2518 }
2519}
2520
2521
2522/**
2523 * @opcode 0x12
2524 * @oppfx 0xf2
2525 * @opcpuid sse3
2526 * @opgroup og_sse3_pcksclr_datamove
2527 * @opxcpttype 5
2528 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2529 * op1=0x22222222111111112222222211111111
2530 */
2531FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2532{
2533 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2535 if (IEM_IS_MODRM_REG_MODE(bRm))
2536 {
2537 /*
2538 * XMM128, XMM64.
2539 */
2540 IEM_MC_BEGIN(0, 0);
2541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2542 IEM_MC_LOCAL(uint64_t, uSrc);
2543
2544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2545 IEM_MC_PREPARE_SSE_USAGE();
2546
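 /* Duplicate the low quadword of the source into both halves of the destination. */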
2547 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2548 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2549 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2550
2551 IEM_MC_ADVANCE_RIP_AND_FINISH();
2552 IEM_MC_END();
2553 }
2554 else
2555 {
2556 /*
2557 * XMM128, [mem64].
2558 */
2559 IEM_MC_BEGIN(0, 0);
2560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2561 IEM_MC_LOCAL(uint64_t, uSrc);
2562
2563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2565 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2566 IEM_MC_PREPARE_SSE_USAGE();
2567
2568 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2570 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2571
2572 IEM_MC_ADVANCE_RIP_AND_FINISH();
2573 IEM_MC_END();
2574 }
2575}
2576
2577
2578/**
2579 * @opcode 0x13
2580 * @opcodesub !11 mr/reg
2581 * @oppfx none
2582 * @opcpuid sse
2583 * @opgroup og_sse_simdfp_datamove
2584 * @opxcpttype 5
2585 * @optest op1=1 op2=2 -> op1=2
2586 * @optest op1=0 op2=-42 -> op1=-42
2587 */
2588FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2589{
2590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2591 if (IEM_IS_MODRM_MEM_MODE(bRm))
2592 {
2593 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2594
2595 IEM_MC_BEGIN(0, 0);
2596 IEM_MC_LOCAL(uint64_t, uSrc);
2597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2598
2599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2601 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2602 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2603
2604 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2605 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2606
2607 IEM_MC_ADVANCE_RIP_AND_FINISH();
2608 IEM_MC_END();
2609 }
2610
2611 /**
2612 * @opdone
2613 * @opmnemonic ud0f13m3
2614 * @opcode 0x13
2615 * @opcodesub 11 mr/reg
2616 * @oppfx none
2617 * @opunused immediate
2618 * @opcpuid sse
2619 * @optest ->
2620 */
2621 else
2622 IEMOP_RAISE_INVALID_OPCODE_RET();
2623}
2624
2625
2626/**
2627 * @opcode 0x13
2628 * @opcodesub !11 mr/reg
2629 * @oppfx 0x66
2630 * @opcpuid sse2
2631 * @opgroup og_sse2_pcksclr_datamove
2632 * @opxcpttype 5
2633 * @optest op1=1 op2=2 -> op1=2
2634 * @optest op1=0 op2=-42 -> op1=-42
2635 */
2636FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2637{
2638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2639 if (IEM_IS_MODRM_MEM_MODE(bRm))
2640 {
2641 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2642
2643 IEM_MC_BEGIN(0, 0);
2644 IEM_MC_LOCAL(uint64_t, uSrc);
2645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2646
2647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2649 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2650 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2651
2652 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2653 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658
2659 /**
2660 * @opdone
2661 * @opmnemonic ud660f13m3
2662 * @opcode 0x13
2663 * @opcodesub 11 mr/reg
2664 * @oppfx 0x66
2665 * @opunused immediate
2666 * @opcpuid sse
2667 * @optest ->
2668 */
2669 else
2670 IEMOP_RAISE_INVALID_OPCODE_RET();
2671}
2672
2673
2674/**
2675 * @opmnemonic udf30f13
2676 * @opcode 0x13
2677 * @oppfx 0xf3
2678 * @opunused intel-modrm
2679 * @opcpuid sse
2680 * @optest ->
2681 * @opdone
2682 */
2683
2684/**
2685 * @opmnemonic udf20f13
2686 * @opcode 0x13
2687 * @oppfx 0xf2
2688 * @opunused intel-modrm
2689 * @opcpuid sse
2690 * @optest ->
2691 * @opdone
2692 */
2693
2694/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2695FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2696{
2697 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2698 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2699}
2700
2701
2702/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2703FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2704{
2705 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2706 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2707}
2708
2709
2710/**
2711 * @opdone
2712 * @opmnemonic udf30f14
2713 * @opcode 0x14
2714 * @oppfx 0xf3
2715 * @opunused intel-modrm
2716 * @opcpuid sse
2717 * @optest ->
2718 * @opdone
2719 */
2720
2721/**
2722 * @opmnemonic udf20f14
2723 * @opcode 0x14
2724 * @oppfx 0xf2
2725 * @opunused intel-modrm
2726 * @opcpuid sse
2727 * @optest ->
2728 * @opdone
2729 */
2730
2731/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2732FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2733{
2734 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2735 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2736}
2737
2738
2739/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2740FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2741{
2742 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2743 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2744}
2745
2746
2747/* Opcode 0xf3 0x0f 0x15 - invalid */
2748/* Opcode 0xf2 0x0f 0x15 - invalid */
2749
2750/**
2751 * @opdone
2752 * @opmnemonic udf30f15
2753 * @opcode 0x15
2754 * @oppfx 0xf3
2755 * @opunused intel-modrm
2756 * @opcpuid sse
2757 * @optest ->
2758 * @opdone
2759 */
2760
2761/**
2762 * @opmnemonic udf20f15
2763 * @opcode 0x15
2764 * @oppfx 0xf2
2765 * @opunused intel-modrm
2766 * @opcpuid sse
2767 * @optest ->
2768 * @opdone
2769 */
2770
2771FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2772{
2773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2774 if (IEM_IS_MODRM_REG_MODE(bRm))
2775 {
2776 /**
2777 * @opcode 0x16
2778 * @opcodesub 11 mr/reg
2779 * @oppfx none
2780 * @opcpuid sse
2781 * @opgroup og_sse_simdfp_datamove
2782 * @opxcpttype 5
2783 * @optest op1=1 op2=2 -> op1=2
2784 * @optest op1=0 op2=-42 -> op1=-42
2785 */
2786 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2787
2788 IEM_MC_BEGIN(0, 0);
2789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2790 IEM_MC_LOCAL(uint64_t, uSrc);
2791
2792 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2793 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
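 /* Copy the low quadword of the source to the high quadword of the destination;
    the destination's low quadword is preserved. */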
2794 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2795 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2796
2797 IEM_MC_ADVANCE_RIP_AND_FINISH();
2798 IEM_MC_END();
2799 }
2800 else
2801 {
2802 /**
2803 * @opdone
2804 * @opcode 0x16
2805 * @opcodesub !11 mr/reg
2806 * @oppfx none
2807 * @opcpuid sse
2808 * @opgroup og_sse_simdfp_datamove
2809 * @opxcpttype 5
2810 * @optest op1=1 op2=2 -> op1=2
2811 * @optest op1=0 op2=-42 -> op1=-42
2812 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2813 */
2814 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2815
2816 IEM_MC_BEGIN(0, 0);
2817 IEM_MC_LOCAL(uint64_t, uSrc);
2818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2819
2820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2822 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2823 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2824
2825 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2826 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2827
2828 IEM_MC_ADVANCE_RIP_AND_FINISH();
2829 IEM_MC_END();
2830 }
2831}
2832
2833
2834/**
2835 * @opcode 0x16
2836 * @opcodesub !11 mr/reg
2837 * @oppfx 0x66
2838 * @opcpuid sse2
2839 * @opgroup og_sse2_pcksclr_datamove
2840 * @opxcpttype 5
2841 * @optest op1=1 op2=2 -> op1=2
2842 * @optest op1=0 op2=-42 -> op1=-42
2843 */
2844FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2845{
2846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2847 if (IEM_IS_MODRM_MEM_MODE(bRm))
2848 {
2849 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2850
2851 IEM_MC_BEGIN(0, 0);
2852 IEM_MC_LOCAL(uint64_t, uSrc);
2853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2854
2855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2857 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2858 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2859
2860 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2861 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2862
2863 IEM_MC_ADVANCE_RIP_AND_FINISH();
2864 IEM_MC_END();
2865 }
2866
2867 /**
2868 * @opdone
2869 * @opmnemonic ud660f16m3
2870 * @opcode 0x16
2871 * @opcodesub 11 mr/reg
2872 * @oppfx 0x66
2873 * @opunused immediate
2874 * @opcpuid sse
2875 * @optest ->
2876 */
2877 else
2878 IEMOP_RAISE_INVALID_OPCODE_RET();
2879}
2880
2881
2882/**
2883 * @opcode 0x16
2884 * @oppfx 0xf3
2885 * @opcpuid sse3
2886 * @opgroup og_sse3_pcksclr_datamove
2887 * @opxcpttype 4
2888 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2889 * op1=0x00000002000000020000000100000001
2890 */
2891FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2892{
2893 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2895 if (IEM_IS_MODRM_REG_MODE(bRm))
2896 {
2897 /*
2898 * XMM128, XMM128.
2899 */
2900 IEM_MC_BEGIN(0, 0);
2901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2902 IEM_MC_LOCAL(RTUINT128U, uSrc);
2903
2904 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2905 IEM_MC_PREPARE_SSE_USAGE();
2906
2907 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
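 /* Duplicate the odd dwords: dst[0..3] = src[1], src[1], src[3], src[3]. */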
2908 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2909 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2910 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2911 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2912
2913 IEM_MC_ADVANCE_RIP_AND_FINISH();
2914 IEM_MC_END();
2915 }
2916 else
2917 {
2918 /*
2919 * XMM128, [mem128].
2920 */
2921 IEM_MC_BEGIN(0, 0);
2922 IEM_MC_LOCAL(RTUINT128U, uSrc);
2923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2924
2925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2927 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2928 IEM_MC_PREPARE_SSE_USAGE();
2929
2930 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2931 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2932 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2933 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2934 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2935
2936 IEM_MC_ADVANCE_RIP_AND_FINISH();
2937 IEM_MC_END();
2938 }
2939}
2940
2941/**
2942 * @opdone
2943 * @opmnemonic udf20f16
2944 * @opcode 0x16
2945 * @oppfx 0xf2
2946 * @opunused intel-modrm
2947 * @opcpuid sse
2948 * @optest ->
2949 * @opdone
2950 */
2951
2952
2953/**
2954 * @opcode 0x17
2955 * @opcodesub !11 mr/reg
2956 * @oppfx none
2957 * @opcpuid sse
2958 * @opgroup og_sse_simdfp_datamove
2959 * @opxcpttype 5
2960 * @optest op1=1 op2=2 -> op1=2
2961 * @optest op1=0 op2=-42 -> op1=-42
2962 */
2963FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2964{
2965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2966 if (IEM_IS_MODRM_MEM_MODE(bRm))
2967 {
2968 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2969
2970 IEM_MC_BEGIN(0, 0);
2971 IEM_MC_LOCAL(uint64_t, uSrc);
2972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2973
2974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2977 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2978
2979 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
2980 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2981
2982 IEM_MC_ADVANCE_RIP_AND_FINISH();
2983 IEM_MC_END();
2984 }
2985
2986 /**
2987 * @opdone
2988 * @opmnemonic ud0f17m3
2989 * @opcode 0x17
2990 * @opcodesub 11 mr/reg
2991 * @oppfx none
2992 * @opunused immediate
2993 * @opcpuid sse
2994 * @optest ->
2995 */
2996 else
2997 IEMOP_RAISE_INVALID_OPCODE_RET();
2998}
2999
3000
3001/**
3002 * @opcode 0x17
3003 * @opcodesub !11 mr/reg
3004 * @oppfx 0x66
3005 * @opcpuid sse2
3006 * @opgroup og_sse2_pcksclr_datamove
3007 * @opxcpttype 5
3008 * @optest op1=1 op2=2 -> op1=2
3009 * @optest op1=0 op2=-42 -> op1=-42
3010 */
3011FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3012{
3013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3014 if (IEM_IS_MODRM_MEM_MODE(bRm))
3015 {
3016 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3017
3018 IEM_MC_BEGIN(0, 0);
3019 IEM_MC_LOCAL(uint64_t, uSrc);
3020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3021
3022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3024 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3025 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3026
3027 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3028 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3029
3030 IEM_MC_ADVANCE_RIP_AND_FINISH();
3031 IEM_MC_END();
3032 }
3033
3034 /**
3035 * @opdone
3036 * @opmnemonic ud660f17m3
3037 * @opcode 0x17
3038 * @opcodesub 11 mr/reg
3039 * @oppfx 0x66
3040 * @opunused immediate
3041 * @opcpuid sse
3042 * @optest ->
3043 */
3044 else
3045 IEMOP_RAISE_INVALID_OPCODE_RET();
3046}
3047
3048
3049/**
3050 * @opdone
3051 * @opmnemonic udf30f17
3052 * @opcode 0x17
3053 * @oppfx 0xf3
3054 * @opunused intel-modrm
3055 * @opcpuid sse
3056 * @optest ->
3057 * @opdone
3058 */
3059
3060/**
3061 * @opmnemonic udf20f17
3062 * @opcode 0x17
3063 * @oppfx 0xf2
3064 * @opunused intel-modrm
3065 * @opcpuid sse
3066 * @optest ->
3067 * @opdone
3068 */
3069
3070
3071/** Opcode 0x0f 0x18. */
3072FNIEMOP_DEF(iemOp_prefetch_Grp16)
3073{
3074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3075 if (IEM_IS_MODRM_MEM_MODE(bRm))
3076 {
3077 switch (IEM_GET_MODRM_REG_8(bRm))
3078 {
3079 case 4: /* Aliased to /0 for the time being according to AMD. */
3080 case 5: /* Aliased to /0 for the time being according to AMD. */
3081 case 6: /* Aliased to /0 for the time being according to AMD. */
3082 case 7: /* Aliased to /0 for the time being according to AMD. */
3083 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3084 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3085 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3086 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3088 }
3089
3090 IEM_MC_BEGIN(0, 0);
3091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3094 /* Currently a NOP. */
3095 IEM_MC_NOREF(GCPtrEffSrc);
3096 IEM_MC_ADVANCE_RIP_AND_FINISH();
3097 IEM_MC_END();
3098 }
3099 else
3100 IEMOP_RAISE_INVALID_OPCODE_RET();
3101}
3102
3103
3104/** Opcode 0x0f 0x19..0x1f. */
3105FNIEMOP_DEF(iemOp_nop_Ev)
3106{
3107 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3109 if (IEM_IS_MODRM_REG_MODE(bRm))
3110 {
3111 IEM_MC_BEGIN(0, 0);
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3113 IEM_MC_ADVANCE_RIP_AND_FINISH();
3114 IEM_MC_END();
3115 }
3116 else
3117 {
3118 IEM_MC_BEGIN(0, 0);
3119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3122 /* Currently a NOP. */
3123 IEM_MC_NOREF(GCPtrEffSrc);
3124 IEM_MC_ADVANCE_RIP_AND_FINISH();
3125 IEM_MC_END();
3126 }
3127}
3128
3129
3130/** Opcode 0x0f 0x20. */
3131FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3132{
3133 /* mod is ignored, as are operand-size overrides. */
3134/** @todo testcase: check memory encoding. */
3135 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3136 IEMOP_HLP_MIN_386();
3137 if (IEM_IS_64BIT_CODE(pVCpu))
3138 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3139 else
3140 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3141
3142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3143 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3144 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3145 {
3146 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3147 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3148 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3149 iCrReg |= 8;
3150 }
3151 switch (iCrReg)
3152 {
3153 case 0: case 2: case 3: case 4: case 8:
3154 break;
3155 default:
3156 IEMOP_RAISE_INVALID_OPCODE_RET();
3157 }
3158 IEMOP_HLP_DONE_DECODING();
3159
3160 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3161 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3162 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3163}
3164
3165
3166/** Opcode 0x0f 0x21. */
3167FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3168{
3169/** @todo testcase: check memory encoding. */
3170 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3171 IEMOP_HLP_MIN_386();
3172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3174 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3175 IEMOP_RAISE_INVALID_OPCODE_RET();
3176 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3177 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3178 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3179}
3180
3181
3182/** Opcode 0x0f 0x22. */
3183FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3184{
3185 /* mod is ignored, as are operand-size overrides. */
3186 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3187 IEMOP_HLP_MIN_386();
3188 if (IEM_IS_64BIT_CODE(pVCpu))
3189 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3190 else
3191 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3192
3193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3194 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3195 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3196 {
3197 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3198 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3199 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3200 iCrReg |= 8;
3201 }
3202 switch (iCrReg)
3203 {
3204 case 0: case 2: case 3: case 4: case 8:
3205 break;
3206 default:
3207 IEMOP_RAISE_INVALID_OPCODE_RET();
3208 }
3209 IEMOP_HLP_DONE_DECODING();
3210
3211 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
3212 if (iCrReg & (2 | 8))
3213 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3214 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3215 else
3216 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3217 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3218}
3219
3220
3221/** Opcode 0x0f 0x23. */
3222FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3223{
3224 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3225 IEMOP_HLP_MIN_386();
3226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3229 IEMOP_RAISE_INVALID_OPCODE_RET();
3230 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3231 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3232}
3233
3234
3235/** Opcode 0x0f 0x24. */
3236FNIEMOP_DEF(iemOp_mov_Rd_Td)
3237{
3238 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3239 IEMOP_HLP_MIN_386();
3240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3242 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3243 IEMOP_RAISE_INVALID_OPCODE_RET();
3244 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3245 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3246}
3247
3248
3249/** Opcode 0x0f 0x26. */
3250FNIEMOP_DEF(iemOp_mov_Td_Rd)
3251{
3252 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3253 IEMOP_HLP_MIN_386();
3254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3256 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3257 IEMOP_RAISE_INVALID_OPCODE_RET();
3258 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3259}
3260
3261
3262/**
3263 * @opcode 0x28
3264 * @oppfx none
3265 * @opcpuid sse
3266 * @opgroup og_sse_simdfp_datamove
3267 * @opxcpttype 1
3268 * @optest op1=1 op2=2 -> op1=2
3269 * @optest op1=0 op2=-42 -> op1=-42
3270 */
3271FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3272{
3273 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3275 if (IEM_IS_MODRM_REG_MODE(bRm))
3276 {
3277 /*
3278 * Register, register.
3279 */
3280 IEM_MC_BEGIN(0, 0);
3281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3282 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3283 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3284 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3285 IEM_GET_MODRM_RM(pVCpu, bRm));
3286 IEM_MC_ADVANCE_RIP_AND_FINISH();
3287 IEM_MC_END();
3288 }
3289 else
3290 {
3291 /*
3292 * Register, memory.
3293 */
3294 IEM_MC_BEGIN(0, 0);
3295 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3300 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3301 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3302
3303 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3304 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3305
3306 IEM_MC_ADVANCE_RIP_AND_FINISH();
3307 IEM_MC_END();
3308 }
3309}
3310
3311/**
3312 * @opcode 0x28
3313 * @oppfx 66
3314 * @opcpuid sse2
3315 * @opgroup og_sse2_pcksclr_datamove
3316 * @opxcpttype 1
3317 * @optest op1=1 op2=2 -> op1=2
3318 * @optest op1=0 op2=-42 -> op1=-42
3319 */
3320FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3321{
3322 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3324 if (IEM_IS_MODRM_REG_MODE(bRm))
3325 {
3326 /*
3327 * Register, register.
3328 */
3329 IEM_MC_BEGIN(0, 0);
3330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3331 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3332 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3333 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3334 IEM_GET_MODRM_RM(pVCpu, bRm));
3335 IEM_MC_ADVANCE_RIP_AND_FINISH();
3336 IEM_MC_END();
3337 }
3338 else
3339 {
3340 /*
3341 * Register, memory.
3342 */
3343 IEM_MC_BEGIN(0, 0);
3344 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3346
3347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3350 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3351
3352 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3353 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3354
3355 IEM_MC_ADVANCE_RIP_AND_FINISH();
3356 IEM_MC_END();
3357 }
3358}
3359
3360/* Opcode 0xf3 0x0f 0x28 - invalid */
3361/* Opcode 0xf2 0x0f 0x28 - invalid */
3362
3363/**
3364 * @opcode 0x29
3365 * @oppfx none
3366 * @opcpuid sse
3367 * @opgroup og_sse_simdfp_datamove
3368 * @opxcpttype 1
3369 * @optest op1=1 op2=2 -> op1=2
3370 * @optest op1=0 op2=-42 -> op1=-42
3371 */
3372FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3373{
3374 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3376 if (IEM_IS_MODRM_REG_MODE(bRm))
3377 {
3378 /*
3379 * Register, register.
3380 */
3381 IEM_MC_BEGIN(0, 0);
3382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3383 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3384 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3385 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3386 IEM_GET_MODRM_REG(pVCpu, bRm));
3387 IEM_MC_ADVANCE_RIP_AND_FINISH();
3388 IEM_MC_END();
3389 }
3390 else
3391 {
3392 /*
3393 * Memory, register.
3394 */
3395 IEM_MC_BEGIN(0, 0);
3396 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3398
3399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3401 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3402 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3403
3404 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3405 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3406
3407 IEM_MC_ADVANCE_RIP_AND_FINISH();
3408 IEM_MC_END();
3409 }
3410}
3411
3412/**
3413 * @opcode 0x29
3414 * @oppfx 66
3415 * @opcpuid sse2
3416 * @opgroup og_sse2_pcksclr_datamove
3417 * @opxcpttype 1
3418 * @optest op1=1 op2=2 -> op1=2
3419 * @optest op1=0 op2=-42 -> op1=-42
3420 */
3421FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3422{
3423 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3425 if (IEM_IS_MODRM_REG_MODE(bRm))
3426 {
3427 /*
3428 * Register, register.
3429 */
3430 IEM_MC_BEGIN(0, 0);
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3432 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3433 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3434 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3435 IEM_GET_MODRM_REG(pVCpu, bRm));
3436 IEM_MC_ADVANCE_RIP_AND_FINISH();
3437 IEM_MC_END();
3438 }
3439 else
3440 {
3441 /*
3442 * Memory, register.
3443 */
3444 IEM_MC_BEGIN(0, 0);
3445 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3447
3448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3450 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3451 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3452
3453 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3454 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3455
3456 IEM_MC_ADVANCE_RIP_AND_FINISH();
3457 IEM_MC_END();
3458 }
3459}
3460
3461/* Opcode 0xf3 0x0f 0x29 - invalid */
3462/* Opcode 0xf2 0x0f 0x29 - invalid */
3463
3464
3465/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3466FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3467{
3468 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3470 if (IEM_IS_MODRM_REG_MODE(bRm))
3471 {
3472 /*
3473 * XMM, MMX
3474 */
3475 IEM_MC_BEGIN(0, 0);
3476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3477 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3478 IEM_MC_LOCAL(X86XMMREG, Dst);
3479 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3480 IEM_MC_ARG(uint64_t, u64Src, 2);
3481 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3482 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3483 IEM_MC_PREPARE_FPU_USAGE();
3484 IEM_MC_FPU_TO_MMX_MODE();
3485
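 /* Convert two signed dwords from the MMX register to singles in the low quadword of the XMM register. */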
3486 IEM_MC_REF_MXCSR(pfMxcsr);
3487 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3488 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3489
3490 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3491 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3492 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3493 } IEM_MC_ELSE() {
3494 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3495 } IEM_MC_ENDIF();
3496
3497 IEM_MC_ADVANCE_RIP_AND_FINISH();
3498 IEM_MC_END();
3499 }
3500 else
3501 {
3502 /*
3503 * XMM, [mem64]
3504 */
3505 IEM_MC_BEGIN(0, 0);
3506 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3507 IEM_MC_LOCAL(X86XMMREG, Dst);
3508 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3509 IEM_MC_ARG(uint64_t, u64Src, 2);
3510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3511
3512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3514 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3515 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3516 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3517
3518 IEM_MC_PREPARE_FPU_USAGE();
3519 IEM_MC_FPU_TO_MMX_MODE();
3520 IEM_MC_REF_MXCSR(pfMxcsr);
 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3521
3522 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3523 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3524 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3525 } IEM_MC_ELSE() {
3526 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3527 } IEM_MC_ENDIF();
3528
3529 IEM_MC_ADVANCE_RIP_AND_FINISH();
3530 IEM_MC_END();
3531 }
3532}
3533
3534
3535/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3536FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3537{
3538 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3540 if (IEM_IS_MODRM_REG_MODE(bRm))
3541 {
3542 /*
3543 * XMM, MMX
3544 */
3545 IEM_MC_BEGIN(0, 0);
3546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3547 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3548 IEM_MC_LOCAL(X86XMMREG, Dst);
3549 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3550 IEM_MC_ARG(uint64_t, u64Src, 2);
3551 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3552 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3553 IEM_MC_PREPARE_FPU_USAGE();
3554 IEM_MC_FPU_TO_MMX_MODE();
3555
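 /* Convert two signed dwords from the MMX register to doubles, filling the whole XMM register. */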
3556 IEM_MC_REF_MXCSR(pfMxcsr);
3557 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3558
3559 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3560 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3561 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3562 } IEM_MC_ELSE() {
3563 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3564 } IEM_MC_ENDIF();
3565
3566 IEM_MC_ADVANCE_RIP_AND_FINISH();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 /*
3572 * XMM, [mem64]
3573 */
3574 IEM_MC_BEGIN(0, 0);
3575 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3576 IEM_MC_LOCAL(X86XMMREG, Dst);
3577 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3578 IEM_MC_ARG(uint64_t, u64Src, 2);
3579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3580
3581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3583 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3584 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3585 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3586
3587 /* Doesn't cause a transition to MMX mode. */
3588 IEM_MC_PREPARE_SSE_USAGE();
3589 IEM_MC_REF_MXCSR(pfMxcsr);
3590
3591 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3592 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3593 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3594 } IEM_MC_ELSE() {
3595 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3596 } IEM_MC_ENDIF();
3597
3598 IEM_MC_ADVANCE_RIP_AND_FINISH();
3599 IEM_MC_END();
3600 }
3601}
3602
3603
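/*
 * Unlike cvtpi2ps above, cvtpi2pd overwrites the whole destination register
 * (two int32 values widened to two doubles), so the old contents need not be
 * fetched first; and since every int32 is exactly representable as a double,
 * the conversion itself never rounds.
 */

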
3604/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3605FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3606{
3607 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3608
3609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3610 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3611 {
3612 if (IEM_IS_MODRM_REG_MODE(bRm))
3613 {
3614 /* XMM, greg64 */
3615 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3616 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3617 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3618 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3619
3620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3621 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3622 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3623
3624 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3625 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3626 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3627 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3628 } IEM_MC_ELSE() {
3629 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3630 } IEM_MC_ENDIF();
3631
3632 IEM_MC_ADVANCE_RIP_AND_FINISH();
3633 IEM_MC_END();
3634 }
3635 else
3636 {
3637 /* XMM, [mem64] */
3638 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3640 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3641 IEM_MC_LOCAL(int64_t, i64Src);
3642 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3643 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3644
3645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3647 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3648 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3649
3650 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3651 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3652 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3653 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3654 } IEM_MC_ELSE() {
3655 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3656 } IEM_MC_ENDIF();
3657
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 IEM_MC_END();
3660 }
3661 }
3662 else
3663 {
3664 if (IEM_IS_MODRM_REG_MODE(bRm))
3665 {
3666 /* greg, XMM */
3667 IEM_MC_BEGIN(0, 0);
3668 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3669 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3670 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3671
3672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3673 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3674 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3675
3676 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3677 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3678 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3679 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3680 } IEM_MC_ELSE() {
3681 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3682 } IEM_MC_ENDIF();
3683
3684 IEM_MC_ADVANCE_RIP_AND_FINISH();
3685 IEM_MC_END();
3686 }
3687 else
3688 {
3689 /* greg, [mem32] */
3690 IEM_MC_BEGIN(0, 0);
3691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3692 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3693 IEM_MC_LOCAL(int32_t, i32Src);
3694 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3695 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3696
3697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3699 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3700 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3701
3702 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3703 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3704 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3705 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3706 } IEM_MC_ELSE() {
3707 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3708 } IEM_MC_ENDIF();
3709
3710 IEM_MC_ADVANCE_RIP_AND_FINISH();
3711 IEM_MC_END();
3712 }
3713 }
3714}
3715
3716
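/*
 * Note on the worker above: cvtsi2ss only replaces the low single of the
 * destination (IEM_MC_STORE_XREG_R32), and the conversion can be inexact -
 * (float)16777217 rounds to 16777216.0f, for instance - so the MXCSR
 * exception path genuinely matters here.
 */

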
3717/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3718FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3719{
3720 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3721
3722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3723 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3724 {
3725 if (IEM_IS_MODRM_REG_MODE(bRm))
3726 {
3727 /* XMM, greg64 */
3728 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3729 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3730 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3731 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3732
3733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3734 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3735 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3736
3737 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3738 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3739 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3740 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3741 } IEM_MC_ELSE() {
3742 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3743 } IEM_MC_ENDIF();
3744
3745 IEM_MC_ADVANCE_RIP_AND_FINISH();
3746 IEM_MC_END();
3747 }
3748 else
3749 {
3750 /* XMM, [mem64] */
3751 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3753 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3754 IEM_MC_LOCAL(int64_t, i64Src);
3755 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3756 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3757
3758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3760 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3761 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3762
3763 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3764 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3765 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3766 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3767 } IEM_MC_ELSE() {
3768 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3769 } IEM_MC_ENDIF();
3770
3771 IEM_MC_ADVANCE_RIP_AND_FINISH();
3772 IEM_MC_END();
3773 }
3774 }
3775 else
3776 {
3777 if (IEM_IS_MODRM_REG_MODE(bRm))
3778 {
3779 /* XMM, greg32 */
3780 IEM_MC_BEGIN(0, 0);
3781 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3782 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3783 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3784
3785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3786 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3787 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3788
3789 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3790 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3791 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3792 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3793 } IEM_MC_ELSE() {
3794 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3795 } IEM_MC_ENDIF();
3796
3797 IEM_MC_ADVANCE_RIP_AND_FINISH();
3798 IEM_MC_END();
3799 }
3800 else
3801 {
3802 /* XMM, [mem32] */
3803 IEM_MC_BEGIN(0, 0);
3804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3805 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3806 IEM_MC_LOCAL(int32_t, i32Src);
3807 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3808 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3809
3810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3812 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3813 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3814
3815 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3816 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3817 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3818 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3819 } IEM_MC_ELSE() {
3820 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3821 } IEM_MC_ENDIF();
3822
3823 IEM_MC_ADVANCE_RIP_AND_FINISH();
3824 IEM_MC_END();
3825 }
3826 }
3827}
3828
3829
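/*
 * For cvtsi2sd the 32-bit source form can never be inexact (any int32 fits
 * exactly in a double), while the 64-bit form can round for magnitudes above
 * 2^53; both forms only touch the low double of the destination.
 */

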
3830/**
3831 * @opcode 0x2b
3832 * @opcodesub !11 mr/reg
3833 * @oppfx none
3834 * @opcpuid sse
3835 * @opgroup og_sse1_cachect
3836 * @opxcpttype 1
3837 * @optest op1=1 op2=2 -> op1=2
3838 * @optest op1=0 op2=-42 -> op1=-42
3839 */
3840FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3841{
3842 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3843 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3844 if (IEM_IS_MODRM_MEM_MODE(bRm))
3845 {
3846 /*
3847 * memory, register.
3848 */
3849 IEM_MC_BEGIN(0, 0);
3850 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3852
3853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3855 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3856 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3857
3858 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3859 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3860
3861 IEM_MC_ADVANCE_RIP_AND_FINISH();
3862 IEM_MC_END();
3863 }
3864 /* The register, register encoding is invalid. */
3865 else
3866 IEMOP_RAISE_INVALID_OPCODE_RET();
3867}
3868
3869/**
3870 * @opcode 0x2b
3871 * @opcodesub !11 mr/reg
3872 * @oppfx 0x66
3873 * @opcpuid sse2
3874 * @opgroup og_sse2_cachect
3875 * @opxcpttype 1
3876 * @optest op1=1 op2=2 -> op1=2
3877 * @optest op1=0 op2=-42 -> op1=-42
3878 */
3879FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3880{
3881 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3883 if (IEM_IS_MODRM_MEM_MODE(bRm))
3884 {
3885 /*
3886 * memory, register.
3887 */
3888 IEM_MC_BEGIN(0, 0);
3889 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3891
3892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3894 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3895 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3896
3897 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3898 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3899
3900 IEM_MC_ADVANCE_RIP_AND_FINISH();
3901 IEM_MC_END();
3902 }
3903 /* The register, register encoding is invalid. */
3904 else
3905 IEMOP_RAISE_INVALID_OPCODE_RET();
3906}
3907/* Opcode 0xf3 0x0f 0x2b - invalid */
3908/* Opcode 0xf2 0x0f 0x2b - invalid */
3909
3910
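/*
 * IEM treats movntps/movntpd as plain 16-byte stores; the non-temporal hint
 * has no architectural effect to emulate, but the _ALIGN_SSE store above
 * still raises #GP(0) for a misaligned effective address.
 */

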
3911/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3912FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3913{
3914 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3916 if (IEM_IS_MODRM_REG_MODE(bRm))
3917 {
3918 /*
3919 * Register, register.
3920 */
3921 IEM_MC_BEGIN(0, 0);
3922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3923 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3924 IEM_MC_LOCAL(uint64_t, u64Dst);
3925 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
3926 IEM_MC_ARG(uint64_t, u64Src, 2);
3927 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3928 IEM_MC_PREPARE_FPU_USAGE();
3929 IEM_MC_FPU_TO_MMX_MODE();
3930
3931 IEM_MC_REF_MXCSR(pfMxcsr);
3932 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3933
3934 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
3935 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3936 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3937 } IEM_MC_ELSE() {
3938 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3939 } IEM_MC_ENDIF();
3940
3941 IEM_MC_ADVANCE_RIP_AND_FINISH();
3942 IEM_MC_END();
3943 }
3944 else
3945 {
3946 /*
3947 * Register, memory.
3948 */
3949 IEM_MC_BEGIN(0, 0);
3950 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3951 IEM_MC_LOCAL(uint64_t, u64Dst);
3952 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
3953 IEM_MC_ARG(uint64_t, u64Src, 2);
3954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3955
3956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3958 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3959 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3960
3961 IEM_MC_PREPARE_FPU_USAGE();
3962 IEM_MC_FPU_TO_MMX_MODE();
3963 IEM_MC_REF_MXCSR(pfMxcsr);
3964
3965 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
3966 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3967 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3968 } IEM_MC_ELSE() {
3969 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3970 } IEM_MC_ENDIF();
3971
3972 IEM_MC_ADVANCE_RIP_AND_FINISH();
3973 IEM_MC_END();
3974 }
3975}
3976
3977
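/*
 * A minimal sketch of the truncating conversion performed by the cvtt*
 * family (hypothetical helper, not used by IEM); NaNs and out-of-range
 * values yield the integer-indefinite value and flag #I in MXCSR.
 */
#if 0 /* illustrative only */
static int32_t cvttRef(float r32)
{
    if (!(r32 >= -2147483648.0f) || !(r32 < 2147483648.0f)) /* NaN or out of range */
        return INT32_MIN;   /* integer indefinite */
    return (int32_t)r32;    /* C casts truncate toward zero, just like cvtt* */
}
#endif

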
3978/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3979FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
3980{
3981 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3983 if (IEM_IS_MODRM_REG_MODE(bRm))
3984 {
3985 /*
3986 * Register, register.
3987 */
3988 IEM_MC_BEGIN(0, 0);
3989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3990 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3991 IEM_MC_LOCAL(uint64_t, u64Dst);
3992 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
3993 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
3994 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3995 IEM_MC_PREPARE_FPU_USAGE();
3996 IEM_MC_FPU_TO_MMX_MODE();
3997
3998 IEM_MC_REF_MXCSR(pfMxcsr);
3999 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4000
4001 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4002 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4003 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4004 } IEM_MC_ELSE() {
4005 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4006 } IEM_MC_ENDIF();
4007
4008 IEM_MC_ADVANCE_RIP_AND_FINISH();
4009 IEM_MC_END();
4010 }
4011 else
4012 {
4013 /*
4014 * Register, memory.
4015 */
4016 IEM_MC_BEGIN(0, 0);
4017 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4018 IEM_MC_LOCAL(uint64_t, u64Dst);
4019 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4020 IEM_MC_LOCAL(X86XMMREG, uSrc);
4021 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4023
4024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4026 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4027 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4028
4029 IEM_MC_PREPARE_FPU_USAGE();
4030 IEM_MC_FPU_TO_MMX_MODE();
4031
4032 IEM_MC_REF_MXCSR(pfMxcsr);
4033
4034 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4035 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4036 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4037 } IEM_MC_ELSE() {
4038 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4039 } IEM_MC_ENDIF();
4040
4041 IEM_MC_ADVANCE_RIP_AND_FINISH();
4042 IEM_MC_END();
4043 }
4044}
4045
4046
4047/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4048FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4049{
4050    IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4051
4052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4053 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4054 {
4055 if (IEM_IS_MODRM_REG_MODE(bRm))
4056 {
4057 /* greg64, XMM */
4058 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4059 IEM_MC_LOCAL(int64_t, i64Dst);
4060 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4061 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4062
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4064 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4065 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4066
4067 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4068 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4069 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4070 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4071 } IEM_MC_ELSE() {
4072 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4073 } IEM_MC_ENDIF();
4074
4075 IEM_MC_ADVANCE_RIP_AND_FINISH();
4076 IEM_MC_END();
4077 }
4078 else
4079 {
4080 /* greg64, [mem64] */
4081 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4083 IEM_MC_LOCAL(int64_t, i64Dst);
4084 IEM_MC_LOCAL(uint32_t, u32Src);
4085 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4086 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4087
4088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4090 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4091 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4092
4093 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4094 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4095 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4096 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4097 } IEM_MC_ELSE() {
4098 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4099 } IEM_MC_ENDIF();
4100
4101 IEM_MC_ADVANCE_RIP_AND_FINISH();
4102 IEM_MC_END();
4103 }
4104 }
4105 else
4106 {
4107 if (IEM_IS_MODRM_REG_MODE(bRm))
4108 {
4109 /* greg, XMM */
4110 IEM_MC_BEGIN(0, 0);
4111 IEM_MC_LOCAL(int32_t, i32Dst);
4112 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4113 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4114
4115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4116 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4117 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4118
4119 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4120 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4121 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4122 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4123 } IEM_MC_ELSE() {
4124 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4125 } IEM_MC_ENDIF();
4126
4127 IEM_MC_ADVANCE_RIP_AND_FINISH();
4128 IEM_MC_END();
4129 }
4130 else
4131 {
4132 /* greg, [mem] */
4133 IEM_MC_BEGIN(0, 0);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135 IEM_MC_LOCAL(int32_t, i32Dst);
4136 IEM_MC_LOCAL(uint32_t, u32Src);
4137 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4138 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4139
4140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4142 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4143 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4144
4145 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4146 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4147 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4148 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4149 } IEM_MC_ELSE() {
4150 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4151 } IEM_MC_ENDIF();
4152
4153 IEM_MC_ADVANCE_RIP_AND_FINISH();
4154 IEM_MC_END();
4155 }
4156 }
4157}
4158
4159
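/*
 * For the 32-bit forms here and below the result is committed with
 * IEM_MC_STORE_GREG_U32, which in long mode zero-extends into the full
 * 64-bit GPR - the standard behaviour for any 32-bit register write.
 */

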
4160/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4161FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4162{
4163 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4164
4165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4166 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4167 {
4168 if (IEM_IS_MODRM_REG_MODE(bRm))
4169 {
4170 /* greg64, XMM */
4171 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4172 IEM_MC_LOCAL(int64_t, i64Dst);
4173 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4174 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4175
4176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4177 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4178 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4179
4180 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4181 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4182 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4183 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4184 } IEM_MC_ELSE() {
4185 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4186 } IEM_MC_ENDIF();
4187
4188 IEM_MC_ADVANCE_RIP_AND_FINISH();
4189 IEM_MC_END();
4190 }
4191 else
4192 {
4193 /* greg64, [mem64] */
4194 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4196 IEM_MC_LOCAL(int64_t, i64Dst);
4197 IEM_MC_LOCAL(uint64_t, u64Src);
4198 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4199 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4200
4201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4203 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4204 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4205
4206 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4207 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4208 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4209 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4210 } IEM_MC_ELSE() {
4211 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4212 } IEM_MC_ENDIF();
4213
4214 IEM_MC_ADVANCE_RIP_AND_FINISH();
4215 IEM_MC_END();
4216 }
4217 }
4218 else
4219 {
4220 if (IEM_IS_MODRM_REG_MODE(bRm))
4221 {
4222 /* greg, XMM */
4223 IEM_MC_BEGIN(0, 0);
4224 IEM_MC_LOCAL(int32_t, i32Dst);
4225 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4226 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4227
4228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4230 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4231
4232 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4233 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4234 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4235 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4236 } IEM_MC_ELSE() {
4237 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4238 } IEM_MC_ENDIF();
4239
4240 IEM_MC_ADVANCE_RIP_AND_FINISH();
4241 IEM_MC_END();
4242 }
4243 else
4244 {
4245 /* greg32, [mem32] */
4246 IEM_MC_BEGIN(0, 0);
4247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4248 IEM_MC_LOCAL(int32_t, i32Dst);
4249 IEM_MC_LOCAL(uint64_t, u64Src);
4250 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4251 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4252
4253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4255 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4256 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4257
4258 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4259 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4260 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4261 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4262 } IEM_MC_ELSE() {
4263 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4264 } IEM_MC_ENDIF();
4265
4266 IEM_MC_ADVANCE_RIP_AND_FINISH();
4267 IEM_MC_END();
4268 }
4269 }
4270}
4271
4272
4273/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4274FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4275{
4276 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4278 if (IEM_IS_MODRM_REG_MODE(bRm))
4279 {
4280 /*
4281 * Register, register.
4282 */
4283 IEM_MC_BEGIN(0, 0);
4284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4285 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4286 IEM_MC_LOCAL(uint64_t, u64Dst);
4287 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4288 IEM_MC_ARG(uint64_t, u64Src, 2);
4289
4290 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4291 IEM_MC_PREPARE_FPU_USAGE();
4292 IEM_MC_FPU_TO_MMX_MODE();
4293
4294 IEM_MC_REF_MXCSR(pfMxcsr);
4295 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4296
4297 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4298 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4299 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4300 } IEM_MC_ELSE() {
4301 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4302 } IEM_MC_ENDIF();
4303
4304 IEM_MC_ADVANCE_RIP_AND_FINISH();
4305 IEM_MC_END();
4306 }
4307 else
4308 {
4309 /*
4310 * Register, memory.
4311 */
4312 IEM_MC_BEGIN(0, 0);
4313 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4314 IEM_MC_LOCAL(uint64_t, u64Dst);
4315 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4316 IEM_MC_ARG(uint64_t, u64Src, 2);
4317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4318
4319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4321 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4322 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4323
4324 IEM_MC_PREPARE_FPU_USAGE();
4325 IEM_MC_FPU_TO_MMX_MODE();
4326 IEM_MC_REF_MXCSR(pfMxcsr);
4327
4328 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4329 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4330 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4331 } IEM_MC_ELSE() {
4332 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4333 } IEM_MC_ENDIF();
4334
4335 IEM_MC_ADVANCE_RIP_AND_FINISH();
4336 IEM_MC_END();
4337 }
4338}
4339
4340
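/*
 * The non-truncating cvt* converters in this block differ from the cvtt*
 * family above only in honouring MXCSR.RC instead of chopping.  A hedged C
 * equivalent for the scalar case (hypothetical helper; the host rounding
 * mode stands in for MXCSR.RC):
 */
#if 0 /* illustrative only */
#include <math.h>
static int32_t cvtRef(float r32) /* out-of-range handling omitted */
{
    return (int32_t)rintf(r32); /* rintf rounds per the current FP mode */
}
#endif

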
4341/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4342FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4343{
4344 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4345 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4346 if (IEM_IS_MODRM_REG_MODE(bRm))
4347 {
4348 /*
4349 * Register, register.
4350 */
4351 IEM_MC_BEGIN(0, 0);
4352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4353 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4354 IEM_MC_LOCAL(uint64_t, u64Dst);
4355 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4356 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4357
4358 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4359 IEM_MC_PREPARE_FPU_USAGE();
4360 IEM_MC_FPU_TO_MMX_MODE();
4361
4362 IEM_MC_REF_MXCSR(pfMxcsr);
4363 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4364
4365 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4366 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4367 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4368 } IEM_MC_ELSE() {
4369 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4370 } IEM_MC_ENDIF();
4371
4372 IEM_MC_ADVANCE_RIP_AND_FINISH();
4373 IEM_MC_END();
4374 }
4375 else
4376 {
4377 /*
4378 * Register, memory.
4379 */
4380 IEM_MC_BEGIN(0, 0);
4381 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4382 IEM_MC_LOCAL(uint64_t, u64Dst);
4383 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4384 IEM_MC_LOCAL(X86XMMREG, uSrc);
4385 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4387
4388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4390 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4391 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4392
4393 IEM_MC_PREPARE_FPU_USAGE();
4394 IEM_MC_FPU_TO_MMX_MODE();
4395
4396 IEM_MC_REF_MXCSR(pfMxcsr);
4397
4398 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4399 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4400 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4401 } IEM_MC_ELSE() {
4402 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4403 } IEM_MC_ENDIF();
4404
4405 IEM_MC_ADVANCE_RIP_AND_FINISH();
4406 IEM_MC_END();
4407 }
4408}
4409
4410
4411/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4412FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4413{
4414    IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4415
4416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4417 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4418 {
4419 if (IEM_IS_MODRM_REG_MODE(bRm))
4420 {
4421 /* greg64, XMM */
4422 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4423 IEM_MC_LOCAL(int64_t, i64Dst);
4424 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4425 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4426
4427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4428 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4429 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4430
4431 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4432 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4433 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4434 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4435 } IEM_MC_ELSE() {
4436 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4437 } IEM_MC_ENDIF();
4438
4439 IEM_MC_ADVANCE_RIP_AND_FINISH();
4440 IEM_MC_END();
4441 }
4442 else
4443 {
4444 /* greg64, [mem64] */
4445 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4447 IEM_MC_LOCAL(int64_t, i64Dst);
4448 IEM_MC_LOCAL(uint32_t, u32Src);
4449 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4450 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4451
4452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4454 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4455 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4456
4457 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4458 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4459 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4460 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4461 } IEM_MC_ELSE() {
4462 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4463 } IEM_MC_ENDIF();
4464
4465 IEM_MC_ADVANCE_RIP_AND_FINISH();
4466 IEM_MC_END();
4467 }
4468 }
4469 else
4470 {
4471 if (IEM_IS_MODRM_REG_MODE(bRm))
4472 {
4473 /* greg, XMM */
4474 IEM_MC_BEGIN(0, 0);
4475 IEM_MC_LOCAL(int32_t, i32Dst);
4476 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4477 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4478
4479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4480 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4481 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4482
4483 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4484 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4485 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4486 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4487 } IEM_MC_ELSE() {
4488 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4489 } IEM_MC_ENDIF();
4490
4491 IEM_MC_ADVANCE_RIP_AND_FINISH();
4492 IEM_MC_END();
4493 }
4494 else
4495 {
4496 /* greg, [mem] */
4497 IEM_MC_BEGIN(0, 0);
4498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4499 IEM_MC_LOCAL(int32_t, i32Dst);
4500 IEM_MC_LOCAL(uint32_t, u32Src);
4501 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4502 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4503
4504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4507 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4508
4509 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4510 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4511 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4512 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4513 } IEM_MC_ELSE() {
4514 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4515 } IEM_MC_ENDIF();
4516
4517 IEM_MC_ADVANCE_RIP_AND_FINISH();
4518 IEM_MC_END();
4519 }
4520 }
4521}
4522
4523
4524/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4525FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4526{
4527 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4528
4529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4530 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4531 {
4532 if (IEM_IS_MODRM_REG_MODE(bRm))
4533 {
4534 /* greg64, XMM */
4535 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4536 IEM_MC_LOCAL(int64_t, i64Dst);
4537 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4538 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4539
4540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4542 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4543
4544 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4545 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4546 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4547 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4548 } IEM_MC_ELSE() {
4549 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4550 } IEM_MC_ENDIF();
4551
4552 IEM_MC_ADVANCE_RIP_AND_FINISH();
4553 IEM_MC_END();
4554 }
4555 else
4556 {
4557 /* greg64, [mem64] */
4558 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4560 IEM_MC_LOCAL(int64_t, i64Dst);
4561 IEM_MC_LOCAL(uint64_t, u64Src);
4562 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4563 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4564
4565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4567 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4568            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4569
4570 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4571 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4572 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4573 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4574 } IEM_MC_ELSE() {
4575 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4576 } IEM_MC_ENDIF();
4577
4578 IEM_MC_ADVANCE_RIP_AND_FINISH();
4579 IEM_MC_END();
4580 }
4581 }
4582 else
4583 {
4584 if (IEM_IS_MODRM_REG_MODE(bRm))
4585 {
4586 /* greg32, XMM */
4587 IEM_MC_BEGIN(0, 0);
4588 IEM_MC_LOCAL(int32_t, i32Dst);
4589 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4590 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4591
4592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4593 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4594 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4595
4596 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4597 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4598 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4599 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4600 } IEM_MC_ELSE() {
4601 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4602 } IEM_MC_ENDIF();
4603
4604 IEM_MC_ADVANCE_RIP_AND_FINISH();
4605 IEM_MC_END();
4606 }
4607 else
4608 {
4609 /* greg32, [mem64] */
4610 IEM_MC_BEGIN(0, 0);
4611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4612 IEM_MC_LOCAL(int32_t, i32Dst);
4613 IEM_MC_LOCAL(uint64_t, u64Src);
4614 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4615 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4616
4617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4619 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4620 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4621
4622 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4623 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4624 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4625 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4626 } IEM_MC_ELSE() {
4627 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4628 } IEM_MC_ENDIF();
4629
4630 IEM_MC_ADVANCE_RIP_AND_FINISH();
4631 IEM_MC_END();
4632 }
4633 }
4634}
4635
4636
4637/**
4638 * @opcode 0x2e
4639 * @oppfx none
4640 * @opflmodify cf,pf,af,zf,sf,of
4641 * @opflclear af,sf,of
4642 */
4643FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4644{
4645 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4647 if (IEM_IS_MODRM_REG_MODE(bRm))
4648 {
4649 /*
4650 * Register, register.
4651 */
4652 IEM_MC_BEGIN(0, 0);
4653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4654 IEM_MC_LOCAL(uint32_t, fEFlags);
4655 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4656 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4657 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4658 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4659 IEM_MC_PREPARE_SSE_USAGE();
4660 IEM_MC_FETCH_EFLAGS(fEFlags);
4661 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4662 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4663 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4664 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4665 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4666 } IEM_MC_ELSE() {
4667 IEM_MC_COMMIT_EFLAGS(fEFlags);
4668 } IEM_MC_ENDIF();
4669
4670 IEM_MC_ADVANCE_RIP_AND_FINISH();
4671 IEM_MC_END();
4672 }
4673 else
4674 {
4675 /*
4676 * Register, memory.
4677 */
4678 IEM_MC_BEGIN(0, 0);
4679 IEM_MC_LOCAL(uint32_t, fEFlags);
4680 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4681 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4682 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4684
4685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4687 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4688 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4689
4690 IEM_MC_PREPARE_SSE_USAGE();
4691 IEM_MC_FETCH_EFLAGS(fEFlags);
4692 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4693 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4694 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4695 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4696 } IEM_MC_ELSE() {
4697 IEM_MC_COMMIT_EFLAGS(fEFlags);
4698 } IEM_MC_ENDIF();
4699
4700 IEM_MC_ADVANCE_RIP_AND_FINISH();
4701 IEM_MC_END();
4702 }
4703}
4704
4705
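/*
 * Flag mapping shared by ucomiss/ucomisd and comiss/comisd below (ZF,PF,CF):
 *      unordered (NaN operand)  -> 1,1,1
 *      src1 > src2              -> 0,0,0
 *      src1 < src2              -> 0,0,1
 *      src1 == src2             -> 1,0,0
 * OF, AF and SF are always cleared.  The ucomis* forms raise #I only for
 * signalling NaNs, whereas the comis* forms raise it for quiet NaNs too.
 */

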
4706/**
4707 * @opcode 0x2e
4708 * @oppfx 0x66
4709 * @opflmodify cf,pf,af,zf,sf,of
4710 * @opflclear af,sf,of
4711 */
4712FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4713{
4714 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4716 if (IEM_IS_MODRM_REG_MODE(bRm))
4717 {
4718 /*
4719 * Register, register.
4720 */
4721 IEM_MC_BEGIN(0, 0);
4722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4723 IEM_MC_LOCAL(uint32_t, fEFlags);
4724 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4725 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4726 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4727 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4728 IEM_MC_PREPARE_SSE_USAGE();
4729 IEM_MC_FETCH_EFLAGS(fEFlags);
4730 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4731 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4732 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4733 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4734 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4735 } IEM_MC_ELSE() {
4736 IEM_MC_COMMIT_EFLAGS(fEFlags);
4737 } IEM_MC_ENDIF();
4738
4739 IEM_MC_ADVANCE_RIP_AND_FINISH();
4740 IEM_MC_END();
4741 }
4742 else
4743 {
4744 /*
4745 * Register, memory.
4746 */
4747 IEM_MC_BEGIN(0, 0);
4748 IEM_MC_LOCAL(uint32_t, fEFlags);
4749 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4750 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4751 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4753
4754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4756 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4757 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4758
4759 IEM_MC_PREPARE_SSE_USAGE();
4760 IEM_MC_FETCH_EFLAGS(fEFlags);
4761 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4762 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4763 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4764 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4765 } IEM_MC_ELSE() {
4766 IEM_MC_COMMIT_EFLAGS(fEFlags);
4767 } IEM_MC_ENDIF();
4768
4769 IEM_MC_ADVANCE_RIP_AND_FINISH();
4770 IEM_MC_END();
4771 }
4772}
4773
4774
4775/* Opcode 0xf3 0x0f 0x2e - invalid */
4776/* Opcode 0xf2 0x0f 0x2e - invalid */
4777
4778
4779/**
4780 * @opcode 0x2f
4781 * @oppfx none
4782 * @opflmodify cf,pf,af,zf,sf,of
4783 * @opflclear af,sf,of
4784 */
4785FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4786{
4787 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4789 if (IEM_IS_MODRM_REG_MODE(bRm))
4790 {
4791 /*
4792 * Register, register.
4793 */
4794 IEM_MC_BEGIN(0, 0);
4795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4796 IEM_MC_LOCAL(uint32_t, fEFlags);
4797 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4798 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4799 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4800 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4801 IEM_MC_PREPARE_SSE_USAGE();
4802 IEM_MC_FETCH_EFLAGS(fEFlags);
4803 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4804 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4805 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4806 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4807 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4808 } IEM_MC_ELSE() {
4809 IEM_MC_COMMIT_EFLAGS(fEFlags);
4810 } IEM_MC_ENDIF();
4811
4812 IEM_MC_ADVANCE_RIP_AND_FINISH();
4813 IEM_MC_END();
4814 }
4815 else
4816 {
4817 /*
4818 * Register, memory.
4819 */
4820 IEM_MC_BEGIN(0, 0);
4821 IEM_MC_LOCAL(uint32_t, fEFlags);
4822 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4823 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4824 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4826
4827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4829 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4830 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4831
4832 IEM_MC_PREPARE_SSE_USAGE();
4833 IEM_MC_FETCH_EFLAGS(fEFlags);
4834 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4835 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4836 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4837 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4838 } IEM_MC_ELSE() {
4839 IEM_MC_COMMIT_EFLAGS(fEFlags);
4840 } IEM_MC_ENDIF();
4841
4842 IEM_MC_ADVANCE_RIP_AND_FINISH();
4843 IEM_MC_END();
4844 }
4845}
4846
4847
4848/**
4849 * @opcode 0x2f
4850 * @oppfx 0x66
4851 * @opflmodify cf,pf,af,zf,sf,of
4852 * @opflclear af,sf,of
4853 */
4854FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4855{
4856 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4858 if (IEM_IS_MODRM_REG_MODE(bRm))
4859 {
4860 /*
4861 * Register, register.
4862 */
4863 IEM_MC_BEGIN(0, 0);
4864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4865 IEM_MC_LOCAL(uint32_t, fEFlags);
4866 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4867 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4868 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4869 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4870 IEM_MC_PREPARE_SSE_USAGE();
4871 IEM_MC_FETCH_EFLAGS(fEFlags);
4872 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4873 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4874 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4875 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4876 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4877 } IEM_MC_ELSE() {
4878 IEM_MC_COMMIT_EFLAGS(fEFlags);
4879 } IEM_MC_ENDIF();
4880
4881 IEM_MC_ADVANCE_RIP_AND_FINISH();
4882 IEM_MC_END();
4883 }
4884 else
4885 {
4886 /*
4887 * Register, memory.
4888 */
4889 IEM_MC_BEGIN(0, 0);
4890 IEM_MC_LOCAL(uint32_t, fEFlags);
4891 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4892 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4893 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4895
4896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4898 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4899 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4900
4901 IEM_MC_PREPARE_SSE_USAGE();
4902 IEM_MC_FETCH_EFLAGS(fEFlags);
4903 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4904 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4905 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4906 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4907 } IEM_MC_ELSE() {
4908 IEM_MC_COMMIT_EFLAGS(fEFlags);
4909 } IEM_MC_ENDIF();
4910
4911 IEM_MC_ADVANCE_RIP_AND_FINISH();
4912 IEM_MC_END();
4913 }
4914}
4915
4916
4917/* Opcode 0xf3 0x0f 0x2f - invalid */
4918/* Opcode 0xf2 0x0f 0x2f - invalid */
4919
4920/** Opcode 0x0f 0x30. */
4921FNIEMOP_DEF(iemOp_wrmsr)
4922{
4923 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4925 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4926}
4927
4928
4929/** Opcode 0x0f 0x31. */
4930FNIEMOP_DEF(iemOp_rdtsc)
4931{
4932 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4934 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4935 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4936 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4937 iemCImpl_rdtsc);
4938}
4939
4940
4941/** Opcode 0x0f 0x32. */
4942FNIEMOP_DEF(iemOp_rdmsr)
4943{
4944 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4946 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4947 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4948 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4949 iemCImpl_rdmsr);
4950}
4951
4952
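/*
 * For both wrmsr and rdmsr the MSR index is taken from ECX and the value
 * travels in EDX:EAX (high:low); hence the xAX/xDX modified-register hints
 * on the rdtsc/rdmsr/rdpmc deferrals here.
 */

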
4953/** Opcode 0x0f 0x33. */
4954FNIEMOP_DEF(iemOp_rdpmc)
4955{
4956 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4958 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4959 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4960 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4961 iemCImpl_rdpmc);
4962}
4963
4964
4965/** Opcode 0x0f 0x34. */
4966FNIEMOP_DEF(iemOp_sysenter)
4967{
4968 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4970 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4971 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4972 iemCImpl_sysenter);
4973}
4974
4975/** Opcode 0x0f 0x35. */
4976FNIEMOP_DEF(iemOp_sysexit)
4977{
4978 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4980 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4981 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4982 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4983}
4984
4985/** Opcode 0x0f 0x37. */
4986FNIEMOP_STUB(iemOp_getsec);
4987
4988
4989/** Opcode 0x0f 0x38. */
4990FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4991{
4992#ifdef IEM_WITH_THREE_0F_38
4993 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4994 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4995#else
4996 IEMOP_BITCH_ABOUT_STUB();
4997 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4998#endif
4999}
5000
5001
5002/** Opcode 0x0f 0x3a. */
5003FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5004{
5005#ifdef IEM_WITH_THREE_0F_3A
5006 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5007 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5008#else
5009 IEMOP_BITCH_ABOUT_STUB();
5010 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5011#endif
5012}
5013
5014
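/*
 * The three-byte tables are indexed by (opcode byte * 4) + idxPrefix, with
 * idxPrefix following the VEX pp convention: 0 = no prefix, 1 = 0x66,
 * 2 = 0xf3, 3 = 0xf2.
 */

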
5015/**
5016 * Implements a conditional move.
5017 *
5018 * Wish there was an obvious way to do this where we could share and reduce
5019 * code bloat.
5020 *
5021 * @param a_Cnd The conditional "microcode" operation.
5022 */
5023#define CMOV_X(a_Cnd) \
5024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5025 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5026 { \
5027 switch (pVCpu->iem.s.enmEffOpSize) \
5028 { \
5029 case IEMMODE_16BIT: \
5030 IEM_MC_BEGIN(0, 0); \
5031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5032 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5033 a_Cnd { \
5034 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5035 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5036 } IEM_MC_ENDIF(); \
5037 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5038 IEM_MC_END(); \
5039 break; \
5040 \
5041 case IEMMODE_32BIT: \
5042 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5044 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5045 a_Cnd { \
5046 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5047 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5048 } IEM_MC_ELSE() { \
5049 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5050 } IEM_MC_ENDIF(); \
5051 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5052 IEM_MC_END(); \
5053 break; \
5054 \
5055 case IEMMODE_64BIT: \
5056 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5058 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5059 a_Cnd { \
5060 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5061 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5062 } IEM_MC_ENDIF(); \
5063 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5064 IEM_MC_END(); \
5065 break; \
5066 \
5067 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5068 } \
5069 } \
5070 else \
5071 { \
5072 switch (pVCpu->iem.s.enmEffOpSize) \
5073 { \
5074 case IEMMODE_16BIT: \
5075 IEM_MC_BEGIN(0, 0); \
5076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5077 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5080 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5081 a_Cnd { \
5082 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5083 } IEM_MC_ENDIF(); \
5084 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5085 IEM_MC_END(); \
5086 break; \
5087 \
5088 case IEMMODE_32BIT: \
5089 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5091 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5094 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5095 a_Cnd { \
5096 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5097 } IEM_MC_ELSE() { \
5098 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5099 } IEM_MC_ENDIF(); \
5100 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5101 IEM_MC_END(); \
5102 break; \
5103 \
5104 case IEMMODE_64BIT: \
5105 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5107 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5110 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5111 a_Cnd { \
5112 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5113 } IEM_MC_ENDIF(); \
5114 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5115 IEM_MC_END(); \
5116 break; \
5117 \
5118 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5119 } \
5120 } do {} while (0)
5121
5122
5123
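/*
 * Two subtleties encoded by CMOV_X: the memory forms always perform the
 * fetch, so a false condition does not suppress #PF/#GP on the operand; and
 * in 64-bit mode a 32-bit cmov clears the high half of the destination even
 * when the condition is false (the IEM_MC_CLEAR_HIGH_GREG_U64 in the ELSE
 * branches).  E.g. with ZF=1, "cmovne eax, ecx" still zeroes the upper
 * 32 bits of RAX.
 */

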
5124/**
5125 * @opcode 0x40
5126 * @opfltest of
5127 */
5128FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5129{
5130 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5131 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5132}
5133
5134
5135/**
5136 * @opcode 0x41
5137 * @opfltest of
5138 */
5139FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5140{
5141 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5142 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5143}
5144
5145
5146/**
5147 * @opcode 0x42
5148 * @opfltest cf
5149 */
5150FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5151{
5152 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5153 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5154}
5155
5156
5157/**
5158 * @opcode 0x43
5159 * @opfltest cf
5160 */
5161FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5162{
5163 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5164 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5165}
5166
5167
5168/**
5169 * @opcode 0x44
5170 * @opfltest zf
5171 */
5172FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5173{
5174 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5175 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5176}
5177
5178
5179/**
5180 * @opcode 0x45
5181 * @opfltest zf
5182 */
5183FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5184{
5185 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5186 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5187}
5188
5189
5190/**
5191 * @opcode 0x46
5192 * @opfltest cf,zf
5193 */
5194FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5195{
5196 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5197 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5198}
5199
5200
5201/**
5202 * @opcode 0x47
5203 * @opfltest cf,zf
5204 */
5205FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5206{
5207 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5208 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5209}
5210
5211
5212/**
5213 * @opcode 0x48
5214 * @opfltest sf
5215 */
5216FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5217{
5218 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5219 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5220}
5221
5222
5223/**
5224 * @opcode 0x49
5225 * @opfltest sf
5226 */
5227FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5228{
5229 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5230 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5231}
5232
5233
5234/**
5235 * @opcode 0x4a
5236 * @opfltest pf
5237 */
5238FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5239{
5240 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5241 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5242}
5243
5244
5245/**
5246 * @opcode 0x4b
5247 * @opfltest pf
5248 */
5249FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5250{
5251 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5252 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5253}
5254
5255
5256/**
5257 * @opcode 0x4c
5258 * @opfltest sf,of
5259 */
5260FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5261{
5262 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5263 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5264}
5265
5266
5267/**
5268 * @opcode 0x4d
5269 * @opfltest sf,of
5270 */
5271FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5272{
5273 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5274 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5275}
5276
5277
5278/**
5279 * @opcode 0x4e
5280 * @opfltest zf,sf,of
5281 */
5282FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5283{
5284 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5285 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5286}
5287
5288
5289/**
5290 * @opcode 0x4f
5291 * @opfltest zf,sf,of
5292 */
5293FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5294{
5295 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5296 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5297}
5298
5299#undef CMOV_X
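
/*
 * Illustrative sketch (example only, excluded from the build): how the CMOV_X
 * conditions above map onto raw EFLAGS tests.  The helper mirrors the
 * IEM_MC_IF_EFL_BITS_NE check used by cmovl (signed less: SF != OF); the
 * function name is local to this sketch and not part of IEM.
 */
#if 0
static bool iemExampleCmovLessTaken(uint32_t fEfl)
{
    /* Hypothetical helper: cmovl transfers the operand when SF and OF disagree. */
    return RT_BOOL(fEfl & X86_EFL_SF) != RT_BOOL(fEfl & X86_EFL_OF);
}
#endif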
5300
5301/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5302FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5303{
5304 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5306 if (IEM_IS_MODRM_REG_MODE(bRm))
5307 {
5308 /*
5309 * Register, register.
5310 */
5311 IEM_MC_BEGIN(0, 0);
5312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5313 IEM_MC_LOCAL(uint8_t, u8Dst);
5314 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5315 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5316 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5317 IEM_MC_PREPARE_SSE_USAGE();
5318 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5319 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5320 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5321 IEM_MC_ADVANCE_RIP_AND_FINISH();
5322 IEM_MC_END();
5323 }
5324 /* No memory operand. */
5325 else
5326 IEMOP_RAISE_INVALID_OPCODE_RET();
5327}
5328
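/*
 * Illustrative sketch (example only, excluded from the build): what the
 * iemAImpl_movmskps_u128 helper invoked above computes - the sign bit of each
 * packed single becomes one bit in the low nibble of the result.  The
 * function name is local to this sketch.
 */
#if 0
static uint8_t iemExampleMovMskPs(PCRTUINT128U puSrc)
{
    uint8_t bRet = 0;
    for (unsigned iLane = 0; iLane < 4; iLane++)
        bRet |= (uint8_t)((puSrc->au32[iLane] >> 31) << iLane); /* sign bit -> bit iLane */
    return bRet;
}
#endif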
5329
5330/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5331FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5332{
5333 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5335 if (IEM_IS_MODRM_REG_MODE(bRm))
5336 {
5337 /*
5338 * Register, register.
5339 */
5340 IEM_MC_BEGIN(0, 0);
5341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5342 IEM_MC_LOCAL(uint8_t, u8Dst);
5343 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5344 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5345 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5346 IEM_MC_PREPARE_SSE_USAGE();
5347 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5348 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5349 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5350 IEM_MC_ADVANCE_RIP_AND_FINISH();
5351 IEM_MC_END();
5352 }
5353 /* No memory operand. */
5354 else
5355 IEMOP_RAISE_INVALID_OPCODE_RET();
5357}
5358
5359
5360/* Opcode 0xf3 0x0f 0x50 - invalid */
5361/* Opcode 0xf2 0x0f 0x50 - invalid */
5362
5363
5364/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5365FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5366{
5367 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5368 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5369}
5370
5371
5372/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5373FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5374{
5375 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5376 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5377}
5378
5379
5380/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5381FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5382{
5383 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5384 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5385}
5386
5387
5388/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5389FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5390{
5391 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5392 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5393}
5394
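/*
 * Illustrative note (example only, excluded from the build): the
 * *_FullR32_To_Full / *_FullR64_To_Full workers used by the scalar forms
 * above and below (sqrtss/sqrtsd, addss/addsd, etc.) operate on the low lane
 * only and leave the upper lanes of the destination untouched.  The stand-in
 * type and function below are local to this sketch, not the real helpers.
 */
#if 0
typedef struct IEMEXAMPLEXMM { float ar32[4]; } IEMEXAMPLEXMM; /* hypothetical stand-in for an XMM register */
static void iemExampleAddSs(IEMEXAMPLEXMM *pDst, float r32Src2)
{
    pDst->ar32[0] += r32Src2; /* low single only; lanes 1..3 unchanged */
}
#endif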
5395
5396/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5397FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5398{
5399 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5400 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5401}
5402
5403
5404/* Opcode 0x66 0x0f 0x52 - invalid */
5405
5406
5407/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5408FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5409{
5410 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5411 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5412}
5413
5414
5415/* Opcode 0xf2 0x0f 0x52 - invalid */
5416
5417
5418/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5419FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5420{
5421 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5422 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5423}
5424
5425
5426/* Opcode 0x66 0x0f 0x53 - invalid */
5427
5428
5429/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5430FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5431{
5432 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5433 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5434}
5435
5436
5437/* Opcode 0xf2 0x0f 0x53 - invalid */
5438
5439
5440/** Opcode 0x0f 0x54 - andps Vps, Wps */
5441FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5442{
5443 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5444 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pand_u128);
5445}
5446
5447
5448/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5449FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5450{
5451 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5452 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pand_u128);
5453}
5454
5455
5456/* Opcode 0xf3 0x0f 0x54 - invalid */
5457/* Opcode 0xf2 0x0f 0x54 - invalid */
5458
5459
5460/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5461FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5462{
5463 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5464 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
5465}
5466
5467
5468/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5469FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5470{
5471 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5472 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
5473}
5474
5475
5476/* Opcode 0xf3 0x0f 0x55 - invalid */
5477/* Opcode 0xf2 0x0f 0x55 - invalid */
5478
5479
5480/** Opcode 0x0f 0x56 - orps Vps, Wps */
5481FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5482{
5483 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5484 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_por_u128);
5485}
5486
5487
5488/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5489FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5490{
5491 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5492 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
5493}
5494
5495
5496/* Opcode 0xf3 0x0f 0x56 - invalid */
5497/* Opcode 0xf2 0x0f 0x56 - invalid */
5498
5499
5500/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5501FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5502{
5503 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5504 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pxor_u128);
5505}
5506
5507
5508/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5509FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5510{
5511 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5512 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
5513}
5514
5515
5516/* Opcode 0xf3 0x0f 0x57 - invalid */
5517/* Opcode 0xf2 0x0f 0x57 - invalid */
5518
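/*
 * Illustrative note (example only, excluded from the build): the FP logical
 * forms above (andps/andpd, andnps/andnpd, orps/orpd, xorps/xorpd) all reuse
 * the integer pand/pandn/por/pxor helpers, since bitwise operations are
 * agnostic of lane type.  A sketch of the pand case, with a hypothetical
 * function name:
 */
#if 0
static void iemExamplePAndU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    /* 128-bit bitwise AND done as two 64-bit halves. */
    puDst->au64[0] &= puSrc->au64[0];
    puDst->au64[1] &= puSrc->au64[1];
}
#endif
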
5519/** Opcode 0x0f 0x58 - addps Vps, Wps */
5520FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5521{
5522 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5523 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5524}
5525
5526
5527/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5528FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5529{
5530 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5531 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5532}
5533
5534
5535/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5536FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5537{
5538 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5539 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5540}
5541
5542
5543/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5544FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5545{
5546 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5547 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5548}
5549
5550
5551/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5552FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5553{
5554 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5555 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5556}
5557
5558
5559/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5560FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5561{
5562 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5563 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5564}
5565
5566
5567/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5568FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5569{
5570 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5571 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5572}
5573
5574
5575/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5576FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5577{
5578 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5579 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5580}
5581
5582
5583/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5584FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5585{
5586 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5587 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5588}
5589
5590
5591/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5592FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5593{
5594 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5595 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5596}
5597
5598
5599/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5600FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5601{
5602 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5603 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5604}
5605
5606
5607/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5608FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5609{
5610 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5611 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5612}
5613
5614
5615/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5616FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5617{
5618 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5619 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5620}
5621
5622
5623/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5624FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5625{
5626 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5627 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5628}
5629
5630
5631/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5632FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5633{
5634 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5635 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5636}
5637
5638
5639/* Opcode 0xf2 0x0f 0x5b - invalid */
5640
5641
5642/** Opcode 0x0f 0x5c - subps Vps, Wps */
5643FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5644{
5645 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5646 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5647}
5648
5649
5650/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5651FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5652{
5653 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5654 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5655}
5656
5657
5658/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5659FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5660{
5661 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5662 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5663}
5664
5665
5666/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5667FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5668{
5669 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5670 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5671}
5672
5673
5674/** Opcode 0x0f 0x5d - minps Vps, Wps */
5675FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5676{
5677 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5678 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5679}
5680
5681
5682/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5683FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5684{
5685 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5686 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5687}
5688
5689
5690/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5691FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5692{
5693 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5694 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5695}
5696
5697
5698/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5699FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5700{
5701 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5702 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5703}
5704
5705
5706/** Opcode 0x0f 0x5e - divps Vps, Wps */
5707FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5708{
5709 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5710 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5711}
5712
5713
5714/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5715FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5716{
5717 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5718 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5719}
5720
5721
5722/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5723FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5724{
5725 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5726 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5727}
5728
5729
5730/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5731FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5732{
5733 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5734 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5735}
5736
5737
5738/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5739FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5740{
5741 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5742 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5743}
5744
5745
5746/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5747FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5748{
5749 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5750 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5751}
5752
5753
5754/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5755FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5756{
5757 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5758 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5759}
5760
5761
5762/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5763FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5764{
5765 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5766 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5767}
5768
5769
5770/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5771FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5772{
5773 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5774 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5775}
5776
5777
5778/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5779FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5780{
5781 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5782 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5783}
5784
5785
5786/* Opcode 0xf3 0x0f 0x60 - invalid */
5787
5788
5789/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5790FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5791{
5792 /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
5793 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5794 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5795}
5796
5797
5798/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5799FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5800{
5801 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5802 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5803}
5804
5805
5806/* Opcode 0xf3 0x0f 0x61 - invalid */
5807
5808
5809/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5810FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5811{
5812 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5813 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5814}
5815
5816
5817/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5818FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5819{
5820 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5821 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5822}
5823
5824
5825/* Opcode 0xf3 0x0f 0x62 - invalid */
5826
5827
5828
5829/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5830FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5831{
5832 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5833 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5834}
5835
5836
5837/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5838FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5839{
5840 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5841 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5842}
5843
5844
5845/* Opcode 0xf3 0x0f 0x63 - invalid */
5846
5847
5848/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5849FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5850{
5851 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5852 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5853}
5854
5855
5856/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5857FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5858{
5859 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5860 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5861}
5862
5863
5864/* Opcode 0xf3 0x0f 0x64 - invalid */
5865
5866
5867/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5868FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5869{
5870 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5871 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5872}
5873
5874
5875/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5876FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5877{
5878 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5879 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5880}
5881
5882
5883/* Opcode 0xf3 0x0f 0x65 - invalid */
5884
5885
5886/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5887FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5888{
5889 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5890 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5891}
5892
5893
5894/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5895FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5896{
5897 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5898 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5899}
5900
5901
5902/* Opcode 0xf3 0x0f 0x66 - invalid */
5903
5904
5905/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5906FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5907{
5908 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5909 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5910}
5911
5912
5913/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5914FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5915{
5916 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5917 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
5918}
5919
5920
5921/* Opcode 0xf3 0x0f 0x67 - invalid */
5922
5923
5924/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5925 * @note Intel and AMD both use Qd for the second parameter, however they
5926 * both list it as an mmX/mem64 operand and Intel describes it as being
5927 * loaded as a qword, so it should be Qq, shouldn't it? */
5928FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5929{
5930 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5931 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5932}
5933
5934
5935/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5936FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5937{
5938 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5939 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5940}
5941
5942
5943/* Opcode 0xf3 0x0f 0x68 - invalid */
5944
5945
5946/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5947 * @note Intel and AMD both use Qd for the second parameter, however they
5948 * both list it as an mmX/mem64 operand and Intel describes it as being
5949 * loaded as a qword, so it should be Qq, shouldn't it? */
5950FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5951{
5952 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5953 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5954}
5955
5956
5957/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5958FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5959{
5960 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5961 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5963}
5964
5965
5966/* Opcode 0xf3 0x0f 0x69 - invalid */
5967
5968
5969/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5970 * @note Intel and AMD both use Qd for the second parameter, however they
5971 * both list it as an mmX/mem64 operand and Intel describes it as being
5972 * loaded as a qword, so it should be Qq, shouldn't it? */
5973FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5974{
5975 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5976 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5977}
5978
5979
5980/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5981FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5982{
5983 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5984 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5985}
5986
5987
5988/* Opcode 0xf3 0x0f 0x6a - invalid */
5989
5990
5991/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5992FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5993{
5994 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5995 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5996}
5997
5998
5999/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6000FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6001{
6002 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6003 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6004}
6005
6006
6007/* Opcode 0xf3 0x0f 0x6b - invalid */
6008
6009
6010/* Opcode 0x0f 0x6c - invalid */
6011
6012
6013/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6014FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6015{
6016 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6017 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6018}
6019
6020
6021/* Opcode 0xf3 0x0f 0x6c - invalid */
6022/* Opcode 0xf2 0x0f 0x6c - invalid */
6023
6024
6025/* Opcode 0x0f 0x6d - invalid */
6026
6027
6028/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6029FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6030{
6031 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6032 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6033}
6034
6035
6036/* Opcode 0xf3 0x0f 0x6d - invalid */
6037
6038
6039FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6040{
6041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6042 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6043 {
6044 /**
6045 * @opcode 0x6e
6046 * @opcodesub rex.w=1
6047 * @oppfx none
6048 * @opcpuid mmx
6049 * @opgroup og_mmx_datamove
6050 * @opxcpttype 5
6051 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6052 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6053 */
6054 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6055 if (IEM_IS_MODRM_REG_MODE(bRm))
6056 {
6057 /* MMX, greg64 */
6058 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6060 IEM_MC_LOCAL(uint64_t, u64Tmp);
6061
6062 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6063 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6064 IEM_MC_FPU_TO_MMX_MODE();
6065
6066 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6067 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6068
6069 IEM_MC_ADVANCE_RIP_AND_FINISH();
6070 IEM_MC_END();
6071 }
6072 else
6073 {
6074 /* MMX, [mem64] */
6075 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6077 IEM_MC_LOCAL(uint64_t, u64Tmp);
6078
6079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6081 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6082 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6083
6084 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6085 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6086 IEM_MC_FPU_TO_MMX_MODE();
6087
6088 IEM_MC_ADVANCE_RIP_AND_FINISH();
6089 IEM_MC_END();
6090 }
6091 }
6092 else
6093 {
6094 /**
6095 * @opdone
6096 * @opcode 0x6e
6097 * @opcodesub rex.w=0
6098 * @oppfx none
6099 * @opcpuid mmx
6100 * @opgroup og_mmx_datamove
6101 * @opxcpttype 5
6102 * @opfunction iemOp_movd_q_Pd_Ey
6103 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6104 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6105 */
6106 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6107 if (IEM_IS_MODRM_REG_MODE(bRm))
6108 {
6109 /* MMX, greg32 */
6110 IEM_MC_BEGIN(0, 0);
6111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6112 IEM_MC_LOCAL(uint32_t, u32Tmp);
6113
6114 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6115 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6116 IEM_MC_FPU_TO_MMX_MODE();
6117
6118 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6119 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6120
6121 IEM_MC_ADVANCE_RIP_AND_FINISH();
6122 IEM_MC_END();
6123 }
6124 else
6125 {
6126 /* MMX, [mem32] */
6127 IEM_MC_BEGIN(0, 0);
6128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6129 IEM_MC_LOCAL(uint32_t, u32Tmp);
6130
6131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6133 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6134 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6135
6136 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6137 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6138 IEM_MC_FPU_TO_MMX_MODE();
6139
6140 IEM_MC_ADVANCE_RIP_AND_FINISH();
6141 IEM_MC_END();
6142 }
6143 }
6144}
6145
6146FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6147{
6148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6149 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6150 {
6151 /**
6152 * @opcode 0x6e
6153 * @opcodesub rex.w=1
6154 * @oppfx 0x66
6155 * @opcpuid sse2
6156 * @opgroup og_sse2_simdint_datamove
6157 * @opxcpttype 5
6158 * @optest 64-bit / op1=1 op2=2 -> op1=2
6159 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6160 */
6161 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6162 if (IEM_IS_MODRM_REG_MODE(bRm))
6163 {
6164 /* XMM, greg64 */
6165 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6167 IEM_MC_LOCAL(uint64_t, u64Tmp);
6168
6169 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6170 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6171
6172 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6173 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6174
6175 IEM_MC_ADVANCE_RIP_AND_FINISH();
6176 IEM_MC_END();
6177 }
6178 else
6179 {
6180 /* XMM, [mem64] */
6181 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6183 IEM_MC_LOCAL(uint64_t, u64Tmp);
6184
6185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6187 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6188 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6189
6190 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6191 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6192
6193 IEM_MC_ADVANCE_RIP_AND_FINISH();
6194 IEM_MC_END();
6195 }
6196 }
6197 else
6198 {
6199 /**
6200 * @opdone
6201 * @opcode 0x6e
6202 * @opcodesub rex.w=0
6203 * @oppfx 0x66
6204 * @opcpuid sse2
6205 * @opgroup og_sse2_simdint_datamove
6206 * @opxcpttype 5
6207 * @opfunction iemOp_movd_q_Vy_Ey
6208 * @optest op1=1 op2=2 -> op1=2
6209 * @optest op1=0 op2=-42 -> op1=-42
6210 */
6211 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6212 if (IEM_IS_MODRM_REG_MODE(bRm))
6213 {
6214 /* XMM, greg32 */
6215 IEM_MC_BEGIN(0, 0);
6216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6217 IEM_MC_LOCAL(uint32_t, u32Tmp);
6218
6219 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6220 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6221
6222 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6223 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6224
6225 IEM_MC_ADVANCE_RIP_AND_FINISH();
6226 IEM_MC_END();
6227 }
6228 else
6229 {
6230 /* XMM, [mem32] */
6231 IEM_MC_BEGIN(0, 0);
6232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6233 IEM_MC_LOCAL(uint32_t, u32Tmp);
6234
6235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6237 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6238 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6239
6240 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6241 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6242
6243 IEM_MC_ADVANCE_RIP_AND_FINISH();
6244 IEM_MC_END();
6245 }
6246 }
6247}
6248
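/*
 * Illustrative sketch (example only, excluded from the build): the zero
 * extension IEM_MC_STORE_XREG_U32_ZX_U128 performs in the movd path above -
 * the 32-bit value lands in the low dword and the rest of the XMM register is
 * cleared.  The function name is local to this sketch.
 */
#if 0
static void iemExampleStoreU32ZxU128(PRTUINT128U puDst, uint32_t uValue)
{
    puDst->au32[0] = uValue;
    puDst->au32[1] = 0;
    puDst->au64[1] = 0; /* upper quadword cleared as well */
}
#endif
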
6249/* Opcode 0xf3 0x0f 0x6e - invalid */
6250
6251
6252/**
6253 * @opcode 0x6f
6254 * @oppfx none
6255 * @opcpuid mmx
6256 * @opgroup og_mmx_datamove
6257 * @opxcpttype 5
6258 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6259 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6260 */
6261FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6262{
6263    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6265 if (IEM_IS_MODRM_REG_MODE(bRm))
6266 {
6267 /*
6268 * Register, register.
6269 */
6270 IEM_MC_BEGIN(0, 0);
6271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6272 IEM_MC_LOCAL(uint64_t, u64Tmp);
6273
6274 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6275 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6276 IEM_MC_FPU_TO_MMX_MODE();
6277
6278 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6279 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6280
6281 IEM_MC_ADVANCE_RIP_AND_FINISH();
6282 IEM_MC_END();
6283 }
6284 else
6285 {
6286 /*
6287 * Register, memory.
6288 */
6289 IEM_MC_BEGIN(0, 0);
6290 IEM_MC_LOCAL(uint64_t, u64Tmp);
6291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6292
6293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6295 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6296 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6297
6298 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6299 IEM_MC_FPU_TO_MMX_MODE();
6300
6301 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6302
6303 IEM_MC_ADVANCE_RIP_AND_FINISH();
6304 IEM_MC_END();
6305 }
6306}
6307
6308/**
6309 * @opcode 0x6f
6310 * @oppfx 0x66
6311 * @opcpuid sse2
6312 * @opgroup og_sse2_simdint_datamove
6313 * @opxcpttype 1
6314 * @optest op1=1 op2=2 -> op1=2
6315 * @optest op1=0 op2=-42 -> op1=-42
6316 */
6317FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6318{
6319 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6321 if (IEM_IS_MODRM_REG_MODE(bRm))
6322 {
6323 /*
6324 * Register, register.
6325 */
6326 IEM_MC_BEGIN(0, 0);
6327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6328
6329 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6330 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6331
6332 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6333 IEM_GET_MODRM_RM(pVCpu, bRm));
6334 IEM_MC_ADVANCE_RIP_AND_FINISH();
6335 IEM_MC_END();
6336 }
6337 else
6338 {
6339 /*
6340 * Register, memory.
6341 */
6342 IEM_MC_BEGIN(0, 0);
6343 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6345
6346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6348 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6349 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6350
6351 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6352 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6353
6354 IEM_MC_ADVANCE_RIP_AND_FINISH();
6355 IEM_MC_END();
6356 }
6357}
6358
6359/**
6360 * @opcode 0x6f
6361 * @oppfx 0xf3
6362 * @opcpuid sse2
6363 * @opgroup og_sse2_simdint_datamove
6364 * @opxcpttype 4UA
6365 * @optest op1=1 op2=2 -> op1=2
6366 * @optest op1=0 op2=-42 -> op1=-42
6367 */
6368FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6369{
6370 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6372 if (IEM_IS_MODRM_REG_MODE(bRm))
6373 {
6374 /*
6375 * Register, register.
6376 */
6377 IEM_MC_BEGIN(0, 0);
6378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6379 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6380 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6381 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6382 IEM_GET_MODRM_RM(pVCpu, bRm));
6383 IEM_MC_ADVANCE_RIP_AND_FINISH();
6384 IEM_MC_END();
6385 }
6386 else
6387 {
6388 /*
6389 * Register, memory.
6390 */
6391 IEM_MC_BEGIN(0, 0);
6392 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6394
6395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6397 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6398 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6399 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6400 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6401
6402 IEM_MC_ADVANCE_RIP_AND_FINISH();
6403 IEM_MC_END();
6404 }
6405}
6406
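/*
 * Illustrative note (example only, excluded from the build): the only
 * difference between the movdqa and movdqu memory paths above is the fetch -
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE faults on a misaligned operand while the
 * _NO_AC variant does not.  Conceptually the alignment test is just the
 * following; the function name is local to this sketch.
 */
#if 0
static bool iemExampleIsSse16Aligned(RTGCPTR GCPtrMem)
{
    return (GCPtrMem & 15) == 0; /* 16-byte alignment for 128-bit accesses */
}
#endif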
6407
6408/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6409FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6410{
6411 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6413 if (IEM_IS_MODRM_REG_MODE(bRm))
6414 {
6415 /*
6416 * Register, register.
6417 */
6418 IEM_MC_BEGIN(0, 0);
6419 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6421 IEM_MC_ARG(uint64_t *, pDst, 0);
6422 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6423 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6424 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6425 IEM_MC_PREPARE_FPU_USAGE();
6426 IEM_MC_FPU_TO_MMX_MODE();
6427
6428 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6429 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6430 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6431 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6432
6433 IEM_MC_ADVANCE_RIP_AND_FINISH();
6434 IEM_MC_END();
6435 }
6436 else
6437 {
6438 /*
6439 * Register, memory.
6440 */
6441 IEM_MC_BEGIN(0, 0);
6442 IEM_MC_ARG(uint64_t *, pDst, 0);
6443 IEM_MC_LOCAL(uint64_t, uSrc);
6444 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6446
6447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6448 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6449 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6451 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6452 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6453
6454 IEM_MC_PREPARE_FPU_USAGE();
6455 IEM_MC_FPU_TO_MMX_MODE();
6456
6457 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6458 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6459 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6460
6461 IEM_MC_ADVANCE_RIP_AND_FINISH();
6462 IEM_MC_END();
6463 }
6464}
6465
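/*
 * Illustrative sketch (example only, excluded from the build): the word
 * shuffle iemAImpl_pshufw_u64 performs - each 2-bit field of the immediate
 * selects the source word for the corresponding destination word.  The
 * function name is local to this sketch.
 */
#if 0
static uint64_t iemExamplePShufW(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uRet = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bImm >> (iWord * 2)) & 3; /* source word index */
        uRet |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
    }
    return uRet;
}
#endif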
6466
6467/**
6468 * Common worker for SSE2 instructions on the forms:
6469 * pshufd xmm1, xmm2/mem128, imm8
6470 * pshufhw xmm1, xmm2/mem128, imm8
6471 * pshuflw xmm1, xmm2/mem128, imm8
6472 *
6473 * Proper alignment of the 128-bit operand is enforced.
6474 * Exceptions type 4. SSE2 cpuid checks.
6475 */
6476FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6477{
6478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6479 if (IEM_IS_MODRM_REG_MODE(bRm))
6480 {
6481 /*
6482 * Register, register.
6483 */
6484 IEM_MC_BEGIN(0, 0);
6485 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6487 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6488 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6489 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6490 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6491 IEM_MC_PREPARE_SSE_USAGE();
6492 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6493 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6494 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6495 IEM_MC_ADVANCE_RIP_AND_FINISH();
6496 IEM_MC_END();
6497 }
6498 else
6499 {
6500 /*
6501 * Register, memory.
6502 */
6503 IEM_MC_BEGIN(0, 0);
6504 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6505 IEM_MC_LOCAL(RTUINT128U, uSrc);
6506 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6508
6509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6510 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6511 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6513 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6514
6515 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6516 IEM_MC_PREPARE_SSE_USAGE();
6517 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6518 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6519
6520 IEM_MC_ADVANCE_RIP_AND_FINISH();
6521 IEM_MC_END();
6522 }
6523}
6524
6525
6526/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6527FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6528{
6529 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6530 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6531}
6532
6533
6534/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6535FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6536{
6537 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6538 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6539}
6540
6541
6542/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6543FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6544{
6545 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6546 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6547}
6548
6549
6550/**
6551 * Common worker for MMX instructions of the form:
6552 * psrlw mm, imm8
6553 * psraw mm, imm8
6554 * psllw mm, imm8
6555 * psrld mm, imm8
6556 * psrad mm, imm8
6557 * pslld mm, imm8
6558 * psrlq mm, imm8
6559 * psllq mm, imm8
6560 *
6561 */
6562FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6563{
6564 if (IEM_IS_MODRM_REG_MODE(bRm))
6565 {
6566 /*
6567 * Register, immediate.
6568 */
6569 IEM_MC_BEGIN(0, 0);
6570 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6572 IEM_MC_ARG(uint64_t *, pDst, 0);
6573 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6574 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6575 IEM_MC_PREPARE_FPU_USAGE();
6576 IEM_MC_FPU_TO_MMX_MODE();
6577
6578 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6579 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6580 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6581
6582 IEM_MC_ADVANCE_RIP_AND_FINISH();
6583 IEM_MC_END();
6584 }
6585 else
6586 {
6587 /*
6588 * Register, memory not supported.
6589 */
6590 /// @todo Caller already enforced register mode?!
6591 AssertFailedReturn(VINF_SUCCESS);
6592 }
6593}
6594
6595
6596/**
6597 * Common worker for SSE2 instructions of the form:
6598 * psrlw xmm, imm8
6599 * psraw xmm, imm8
6600 * psllw xmm, imm8
6601 * psrld xmm, imm8
6602 * psrad xmm, imm8
6603 * pslld xmm, imm8
6604 * psrlq xmm, imm8
6605 * psllq xmm, imm8
6606 *
6607 */
6608FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6609{
6610 if (IEM_IS_MODRM_REG_MODE(bRm))
6611 {
6612 /*
6613 * Register, immediate.
6614 */
6615 IEM_MC_BEGIN(0, 0);
6616 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6618 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6619 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6620 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6621 IEM_MC_PREPARE_SSE_USAGE();
6622 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6623 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6624 IEM_MC_ADVANCE_RIP_AND_FINISH();
6625 IEM_MC_END();
6626 }
6627 else
6628 {
6629 /*
6630         * Register, memory not supported.
6631 */
6632 /// @todo Caller already enforced register mode?!
6633 AssertFailedReturn(VINF_SUCCESS);
6634 }
6635}
6636
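/*
 * Illustrative sketch (example only, excluded from the build): the per-lane
 * logical right shift iemAImpl_psrlw_imm_u64 applies for the MMX worker
 * above; a count of 16 or more clears every word lane.  The function name is
 * local to this sketch.
 */
#if 0
static uint64_t iemExamplePsrlwImm(uint64_t uSrc, uint8_t cShift)
{
    if (cShift > 15)
        return 0; /* oversized counts zero all lanes */
    uint64_t uRet = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        uint16_t const uLane = (uint16_t)(uSrc >> (iWord * 16));
        uRet |= (uint64_t)(uLane >> cShift) << (iWord * 16);
    }
    return uRet;
}
#endif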
6637
6638/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6639FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6640{
6641// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6642 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6643}
6644
6645
6646/** Opcode 0x66 0x0f 0x71 11/2. */
6647FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6648{
6649// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6650 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6651}
6652
6653
6654/** Opcode 0x0f 0x71 11/4. */
6655FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6656{
6657// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6658 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6659}
6660
6661
6662/** Opcode 0x66 0x0f 0x71 11/4. */
6663FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6664{
6665// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6666 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6667}
6668
6669
6670/** Opcode 0x0f 0x71 11/6. */
6671FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6672{
6673// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6674 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6675}
6676
6677
6678/** Opcode 0x66 0x0f 0x71 11/6. */
6679FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6680{
6681// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6682 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6683}
6684
6685
6686/**
6687 * Group 12 jump table for register variant.
6688 */
6689IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6690{
6691 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6692 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6693 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6694 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6695 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6696 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6697 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6698 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6699};
6700AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6701
6702
6703/** Opcode 0x0f 0x71. */
6704FNIEMOP_DEF(iemOp_Grp12)
6705{
6706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6707 if (IEM_IS_MODRM_REG_MODE(bRm))
6708 /* register, register */
6709 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6710 + pVCpu->iem.s.idxPrefix], bRm);
6711 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6712}
6713
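/*
 * Illustrative note (example only, excluded from the build): the group
 * dispatchers index their flat 8x4 tables as [modrm.reg][prefix].  The
 * assumed idxPrefix encoding here is 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2,
 * matching the four columns of each table row; the lookup helper below is
 * local to this sketch.
 */
#if 0
static PFNIEMOPRM iemExampleGrp12Lookup(uint8_t bRm, uint8_t idxPrefix)
{
    /* ((bRm >> 3) & 7) is what IEM_GET_MODRM_REG_8 extracts. */
    return g_apfnGroup12RegReg[((bRm >> 3) & 7) * 4 + idxPrefix];
}
#endif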
6714
6715/** Opcode 0x0f 0x72 11/2. */
6716FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6717{
6718// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6719 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6720}
6721
6722
6723/** Opcode 0x66 0x0f 0x72 11/2. */
6724FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6725{
6726// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6727 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6728}
6729
6730
6731/** Opcode 0x0f 0x72 11/4. */
6732FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6733{
6734// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6735 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6736}
6737
6738
6739/** Opcode 0x66 0x0f 0x72 11/4. */
6740FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6741{
6742// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6743 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6744}
6745
6746
6747/** Opcode 0x0f 0x72 11/6. */
6748FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6749{
6750// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6751 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6752}
6753
6754/** Opcode 0x66 0x0f 0x72 11/6. */
6755FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6756{
6757// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6758 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6759}
6760
6761
6762/**
6763 * Group 13 jump table for register variant.
6764 */
6765IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6766{
6767 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6768 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6769 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6770 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6771 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6772 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6773 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6774 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6775};
6776AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6777
6778/** Opcode 0x0f 0x72. */
6779FNIEMOP_DEF(iemOp_Grp13)
6780{
6781 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6782 if (IEM_IS_MODRM_REG_MODE(bRm))
6783 /* register, register */
6784 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6785 + pVCpu->iem.s.idxPrefix], bRm);
6786 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6787}
6788
6789
6790/** Opcode 0x0f 0x73 11/2. */
6791FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6792{
6793// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6794 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6795}
6796
6797
6798/** Opcode 0x66 0x0f 0x73 11/2. */
6799FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6800{
6801// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6802 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6803}
6804
6805
6806/** Opcode 0x66 0x0f 0x73 11/3. */
6807FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6808{
6809// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6810 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6811}
6812
6813
6814/** Opcode 0x0f 0x73 11/6. */
6815FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6816{
6817// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6818 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6819}
6820
6821
6822/** Opcode 0x66 0x0f 0x73 11/6. */
6823FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6824{
6825// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6826 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6827}
6828
6829
6830/** Opcode 0x66 0x0f 0x73 11/7. */
6831FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6832{
6833// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6834 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6835}
6836
6837/**
6838 * Group 14 jump table for register variant.
6839 */
6840IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6841{
6842 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6843 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6844 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6845 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6846 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6847 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6848 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6849 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6850};
6851AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6852
6853
6854/** Opcode 0x0f 0x73. */
6855FNIEMOP_DEF(iemOp_Grp14)
6856{
6857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6858 if (IEM_IS_MODRM_REG_MODE(bRm))
6859 /* register, register */
6860 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6861 + pVCpu->iem.s.idxPrefix], bRm);
6862 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6863}
6864
6865
6866/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6867FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6868{
6869 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6870 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6871}
6872
6873
6874/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6875FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6876{
6877 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6878 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
6879}
6880
6881
6882/* Opcode 0xf3 0x0f 0x74 - invalid */
6883/* Opcode 0xf2 0x0f 0x74 - invalid */
6884
6885
6886/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6887FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6888{
6889 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6890 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6891}
6892
6893
6894/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6895FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6896{
6897 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6898 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
6899}
6900
6901
6902/* Opcode 0xf3 0x0f 0x75 - invalid */
6903/* Opcode 0xf2 0x0f 0x75 - invalid */
6904
6905
6906/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6907FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6908{
6909 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6910 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6911}
6912
6913
6914/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6915FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6916{
6917 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6918 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
6919}
6920
6921
6922/* Opcode 0xf3 0x0f 0x76 - invalid */
6923/* Opcode 0xf2 0x0f 0x76 - invalid */
6924
6925
6926/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6927FNIEMOP_DEF(iemOp_emms)
6928{
6929 IEMOP_MNEMONIC(emms, "emms");
6930 IEM_MC_BEGIN(0, 0);
6931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6932 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6933 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6934 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6935 IEM_MC_FPU_FROM_MMX_MODE();
6936 IEM_MC_ADVANCE_RIP_AND_FINISH();
6937 IEM_MC_END();
6938}
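
/*
 * Illustrative note (example only, excluded from the build): leaving MMX mode
 * via IEM_MC_FPU_FROM_MMX_MODE above amounts to marking all eight x87
 * registers empty again.  In the abridged FXSAVE tag format assumed here (one
 * bit per register, set = non-empty) that reads as follows; compare the
 * ftw=0xff results in the MMX @optest annotations.
 */
#if 0
static void iemExampleEmmsTagEffect(uint8_t *pbAbridgedFtw)
{
    *pbAbridgedFtw = 0; /* all eight registers tagged empty */
}
#endif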
6939
6940/* Opcode 0x66 0x0f 0x77 - invalid */
6941/* Opcode 0xf3 0x0f 0x77 - invalid */
6942/* Opcode 0xf2 0x0f 0x77 - invalid */
6943
6944/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6945#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6946FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6947{
6948 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6949 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6950 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6951 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6952
6953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6954 if (IEM_IS_MODRM_REG_MODE(bRm))
6955 {
6956 /*
6957 * Register, register.
6958 */
6959 if (enmEffOpSize == IEMMODE_64BIT)
6960 {
6961 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6962 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6963 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6964 IEM_MC_ARG(uint64_t, u64Enc, 1);
6965 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6966 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6967 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6968 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6969 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6970 IEM_MC_END();
6971 }
6972 else
6973 {
6974 IEM_MC_BEGIN(0, 0);
6975 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6976 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6977 IEM_MC_ARG(uint32_t, u32Enc, 1);
6978 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6979 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6980 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6981 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6982 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6983 IEM_MC_END();
6984 }
6985 }
6986 else
6987 {
6988 /*
6989 * Memory, register.
6990 */
6991 if (enmEffOpSize == IEMMODE_64BIT)
6992 {
6993 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6994 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6996 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6997 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6998 IEM_MC_ARG(uint64_t, u64Enc, 2);
6999 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7000 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7001 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7002 IEM_MC_END();
7003 }
7004 else
7005 {
7006 IEM_MC_BEGIN(0, 0);
7007 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7009 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7010 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7011 IEM_MC_ARG(uint32_t, u32Enc, 2);
7012 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7013 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7014 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7015 IEM_MC_END();
7016 }
7017 }
7018}
7019#else
7020FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
7021#endif
7022
7023/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7024FNIEMOP_STUB(iemOp_AmdGrp17);
7025/* Opcode 0xf3 0x0f 0x78 - invalid */
7026/* Opcode 0xf2 0x0f 0x78 - invalid */
7027
7028/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7029#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7030FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7031{
7032 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7033 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7034 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7035 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7036
7037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7038 if (IEM_IS_MODRM_REG_MODE(bRm))
7039 {
7040 /*
7041 * Register, register.
7042 */
7043 if (enmEffOpSize == IEMMODE_64BIT)
7044 {
7045 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7046 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7047 IEM_MC_ARG(uint64_t, u64Val, 0);
7048 IEM_MC_ARG(uint64_t, u64Enc, 1);
7049 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7050 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7051 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7052 IEM_MC_END();
7053 }
7054 else
7055 {
7056 IEM_MC_BEGIN(0, 0);
7057 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7058 IEM_MC_ARG(uint32_t, u32Val, 0);
7059 IEM_MC_ARG(uint32_t, u32Enc, 1);
7060 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7061 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7062 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7063 IEM_MC_END();
7064 }
7065 }
7066 else
7067 {
7068 /*
7069 * Register, memory.
7070 */
7071 if (enmEffOpSize == IEMMODE_64BIT)
7072 {
7073 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7074 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7076 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7077 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7078 IEM_MC_ARG(uint64_t, u64Enc, 2);
7079 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7080 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7081 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7082 IEM_MC_END();
7083 }
7084 else
7085 {
7086 IEM_MC_BEGIN(0, 0);
7087 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7089 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7090 IEM_MC_ARG(uint32_t, u32Enc, 2);
7091 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7092 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7093 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7094 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7095 IEM_MC_END();
7096 }
7097 }
7098}
7099#else
7100FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
7101#endif
7102/* Opcode 0x66 0x0f 0x79 - invalid */
7103/* Opcode 0xf3 0x0f 0x79 - invalid */
7104/* Opcode 0xf2 0x0f 0x79 - invalid */
7105
7106/* Opcode 0x0f 0x7a - invalid */
7107/* Opcode 0x66 0x0f 0x7a - invalid */
7108/* Opcode 0xf3 0x0f 0x7a - invalid */
7109/* Opcode 0xf2 0x0f 0x7a - invalid */
7110
7111/* Opcode 0x0f 0x7b - invalid */
7112/* Opcode 0x66 0x0f 0x7b - invalid */
7113/* Opcode 0xf3 0x0f 0x7b - invalid */
7114/* Opcode 0xf2 0x0f 0x7b - invalid */
7115
7116/* Opcode 0x0f 0x7c - invalid */
7117
7118
7119/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7120FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7121{
7122 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7123 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7124}
7125
7126
7127/* Opcode 0xf3 0x0f 0x7c - invalid */
7128
7129
7130/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7131FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7132{
7133 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7134 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7135}
7136
7137
7138/* Opcode 0x0f 0x7d - invalid */
7139
7140
7141/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7142FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7143{
7144 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7145 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7146}
7147
7148
7149/* Opcode 0xf3 0x0f 0x7d - invalid */
7150
7151
7152/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7153FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7154{
7155 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7156 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7157}
7158
7159
7160/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7161FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7162{
7163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
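    /* REX.W selects the 64-bit movq form; without it this is the 32-bit movd form. */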
7164 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7165 {
7166 /**
7167 * @opcode 0x7e
7168 * @opcodesub rex.w=1
7169 * @oppfx none
7170 * @opcpuid mmx
7171 * @opgroup og_mmx_datamove
7172 * @opxcpttype 5
7173 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7174 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7175 */
7176 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7177 if (IEM_IS_MODRM_REG_MODE(bRm))
7178 {
7179 /* greg64, MMX */
7180 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7182 IEM_MC_LOCAL(uint64_t, u64Tmp);
7183
7184 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7185 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7186 IEM_MC_FPU_TO_MMX_MODE();
7187
7188 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7189 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7190
7191 IEM_MC_ADVANCE_RIP_AND_FINISH();
7192 IEM_MC_END();
7193 }
7194 else
7195 {
7196 /* [mem64], MMX */
7197 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7199 IEM_MC_LOCAL(uint64_t, u64Tmp);
7200
7201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7203 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7204 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7205
7206 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7207 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7208 IEM_MC_FPU_TO_MMX_MODE();
7209
7210 IEM_MC_ADVANCE_RIP_AND_FINISH();
7211 IEM_MC_END();
7212 }
7213 }
7214 else
7215 {
7216 /**
7217 * @opdone
7218 * @opcode 0x7e
7219 * @opcodesub rex.w=0
7220 * @oppfx none
7221 * @opcpuid mmx
7222 * @opgroup og_mmx_datamove
7223 * @opxcpttype 5
7224     * @opfunction iemOp_movd_q_Ey_Pd
7225 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7226 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7227 */
7228 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7229 if (IEM_IS_MODRM_REG_MODE(bRm))
7230 {
7231 /* greg32, MMX */
7232 IEM_MC_BEGIN(0, 0);
7233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7234 IEM_MC_LOCAL(uint32_t, u32Tmp);
7235
7236 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7237 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7238 IEM_MC_FPU_TO_MMX_MODE();
7239
7240 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7241 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7242
7243 IEM_MC_ADVANCE_RIP_AND_FINISH();
7244 IEM_MC_END();
7245 }
7246 else
7247 {
7248 /* [mem32], MMX */
7249 IEM_MC_BEGIN(0, 0);
7250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7251 IEM_MC_LOCAL(uint32_t, u32Tmp);
7252
7253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7255 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7256 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7257
7258 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7259 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7260 IEM_MC_FPU_TO_MMX_MODE();
7261
7262 IEM_MC_ADVANCE_RIP_AND_FINISH();
7263 IEM_MC_END();
7264 }
7265 }
7266}
7267
7268
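/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */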
7269FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7270{
7271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7272 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7273 {
7274 /**
7275 * @opcode 0x7e
7276 * @opcodesub rex.w=1
7277 * @oppfx 0x66
7278 * @opcpuid sse2
7279 * @opgroup og_sse2_simdint_datamove
7280 * @opxcpttype 5
7281 * @optest 64-bit / op1=1 op2=2 -> op1=2
7282 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7283 */
7284 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7285 if (IEM_IS_MODRM_REG_MODE(bRm))
7286 {
7287 /* greg64, XMM */
7288 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7290 IEM_MC_LOCAL(uint64_t, u64Tmp);
7291
7292 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7294
7295 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7296 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7297
7298 IEM_MC_ADVANCE_RIP_AND_FINISH();
7299 IEM_MC_END();
7300 }
7301 else
7302 {
7303 /* [mem64], XMM */
7304 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7306 IEM_MC_LOCAL(uint64_t, u64Tmp);
7307
7308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7312
7313 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7314 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7315
7316 IEM_MC_ADVANCE_RIP_AND_FINISH();
7317 IEM_MC_END();
7318 }
7319 }
7320 else
7321 {
7322 /**
7323 * @opdone
7324 * @opcode 0x7e
7325 * @opcodesub rex.w=0
7326 * @oppfx 0x66
7327 * @opcpuid sse2
7328 * @opgroup og_sse2_simdint_datamove
7329 * @opxcpttype 5
7330     * @opfunction iemOp_movd_q_Ey_Vy
7331 * @optest op1=1 op2=2 -> op1=2
7332 * @optest op1=0 op2=-42 -> op1=-42
7333 */
7334 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7335 if (IEM_IS_MODRM_REG_MODE(bRm))
7336 {
7337 /* greg32, XMM */
7338 IEM_MC_BEGIN(0, 0);
7339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7340 IEM_MC_LOCAL(uint32_t, u32Tmp);
7341
7342 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7343 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7344
7345 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7346 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7347
7348 IEM_MC_ADVANCE_RIP_AND_FINISH();
7349 IEM_MC_END();
7350 }
7351 else
7352 {
7353 /* [mem32], XMM */
7354 IEM_MC_BEGIN(0, 0);
7355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7356 IEM_MC_LOCAL(uint32_t, u32Tmp);
7357
7358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7360 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7361 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7362
7363 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7364 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7365
7366 IEM_MC_ADVANCE_RIP_AND_FINISH();
7367 IEM_MC_END();
7368 }
7369 }
7370}
7371
7372/**
7373 * @opcode 0x7e
7374 * @oppfx 0xf3
7375 * @opcpuid sse2
7376 * @opgroup og_sse2_pcksclr_datamove
7377 * @opxcpttype none
7378 * @optest op1=1 op2=2 -> op1=2
7379 * @optest op1=0 op2=-42 -> op1=-42
7380 */
7381FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7382{
7383 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
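    /* Moves a quadword and zero-extends it to fill the destination register (VqZx). */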
7384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7385 if (IEM_IS_MODRM_REG_MODE(bRm))
7386 {
7387 /*
7388 * XMM128, XMM64.
7389 */
7390 IEM_MC_BEGIN(0, 0);
7391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7392 IEM_MC_LOCAL(uint64_t, uSrc);
7393
7394 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7396
7397 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7398 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7399
7400 IEM_MC_ADVANCE_RIP_AND_FINISH();
7401 IEM_MC_END();
7402 }
7403 else
7404 {
7405 /*
7406 * XMM128, [mem64].
7407 */
7408 IEM_MC_BEGIN(0, 0);
7409 IEM_MC_LOCAL(uint64_t, uSrc);
7410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7411
7412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7414 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7415 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7416
7417 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7418 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7419
7420 IEM_MC_ADVANCE_RIP_AND_FINISH();
7421 IEM_MC_END();
7422 }
7423}
7424
7425/* Opcode 0xf2 0x0f 0x7e - invalid */
7426
7427
7428/** Opcode 0x0f 0x7f - movq Qq, Pq */
7429FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7430{
7431 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7433 if (IEM_IS_MODRM_REG_MODE(bRm))
7434 {
7435 /*
7436 * MMX, MMX.
7437 */
7438 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7439 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7440 IEM_MC_BEGIN(0, 0);
7441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7442 IEM_MC_LOCAL(uint64_t, u64Tmp);
7443 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7444 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7445 IEM_MC_FPU_TO_MMX_MODE();
7446
7447 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7448 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7449
7450 IEM_MC_ADVANCE_RIP_AND_FINISH();
7451 IEM_MC_END();
7452 }
7453 else
7454 {
7455 /*
7456 * [mem64], MMX.
7457 */
7458 IEM_MC_BEGIN(0, 0);
7459 IEM_MC_LOCAL(uint64_t, u64Tmp);
7460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7461
7462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7464 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7465 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7466
7467 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7468 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
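        /* The MMX mode switch is done only after the store, so a faulting
           access leaves the FPU/MMX state untouched. */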
7469 IEM_MC_FPU_TO_MMX_MODE();
7470
7471 IEM_MC_ADVANCE_RIP_AND_FINISH();
7472 IEM_MC_END();
7473 }
7474}
7475
7476/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7477FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7478{
7479 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7481 if (IEM_IS_MODRM_REG_MODE(bRm))
7482 {
7483 /*
7484 * XMM, XMM.
7485 */
7486 IEM_MC_BEGIN(0, 0);
7487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7488 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7489 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7490 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7491 IEM_GET_MODRM_REG(pVCpu, bRm));
7492 IEM_MC_ADVANCE_RIP_AND_FINISH();
7493 IEM_MC_END();
7494 }
7495 else
7496 {
7497 /*
7498 * [mem128], XMM.
7499 */
7500 IEM_MC_BEGIN(0, 0);
7501 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7503
7504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7507 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7508
7509 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
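        /* The aligned store raises #GP(0) on misaligned 16-byte accesses, as movdqa requires. */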
7510 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7511
7512 IEM_MC_ADVANCE_RIP_AND_FINISH();
7513 IEM_MC_END();
7514 }
7515}
7516
7517/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7518FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7519{
7520 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7522 if (IEM_IS_MODRM_REG_MODE(bRm))
7523 {
7524 /*
7525 * XMM, XMM.
7526 */
7527 IEM_MC_BEGIN(0, 0);
7528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7529 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7530 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7531 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7532 IEM_GET_MODRM_REG(pVCpu, bRm));
7533 IEM_MC_ADVANCE_RIP_AND_FINISH();
7534 IEM_MC_END();
7535 }
7536 else
7537 {
7538 /*
7539 * [mem128], XMM.
7540 */
7541 IEM_MC_BEGIN(0, 0);
7542 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7544
7545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7547 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7548 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7549
7550 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
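        /* movdqu has no alignment requirement, hence the no-alignment-check store. */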
7551 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7552
7553 IEM_MC_ADVANCE_RIP_AND_FINISH();
7554 IEM_MC_END();
7555 }
7556}
7557
7558/* Opcode 0xf2 0x0f 0x7f - invalid */
7559
7560
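/*
 * The 0x0f 0x80..0x8f opcodes are the long Jcc forms: a signed 16-bit or
 * 32-bit displacement is fetched according to the effective operand size and
 * the branch is taken when the EFLAGS condition holds (e.g. 0f 84 rel32 is
 * je/jz rel32); otherwise RIP is simply advanced past the instruction.
 */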
7561/**
7562 * @opcode 0x80
7563 * @opfltest of
7564 */
7565FNIEMOP_DEF(iemOp_jo_Jv)
7566{
7567 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7568 IEMOP_HLP_MIN_386();
7569 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7570 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7571 {
7572 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7573 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7576 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7577 } IEM_MC_ELSE() {
7578 IEM_MC_ADVANCE_RIP_AND_FINISH();
7579 } IEM_MC_ENDIF();
7580 IEM_MC_END();
7581 }
7582 else
7583 {
7584 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7585 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7587 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7588 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7589 } IEM_MC_ELSE() {
7590 IEM_MC_ADVANCE_RIP_AND_FINISH();
7591 } IEM_MC_ENDIF();
7592 IEM_MC_END();
7593 }
7594}
7595
7596
7597/**
7598 * @opcode 0x81
7599 * @opfltest of
7600 */
7601FNIEMOP_DEF(iemOp_jno_Jv)
7602{
7603 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7604 IEMOP_HLP_MIN_386();
7605 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7606 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7607 {
7608 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7609 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7611 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7612 IEM_MC_ADVANCE_RIP_AND_FINISH();
7613 } IEM_MC_ELSE() {
7614 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7615 } IEM_MC_ENDIF();
7616 IEM_MC_END();
7617 }
7618 else
7619 {
7620 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7621 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7623 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7624 IEM_MC_ADVANCE_RIP_AND_FINISH();
7625 } IEM_MC_ELSE() {
7626 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7627 } IEM_MC_ENDIF();
7628 IEM_MC_END();
7629 }
7630}
7631
7632
7633/**
7634 * @opcode 0x82
7635 * @opfltest cf
7636 */
7637FNIEMOP_DEF(iemOp_jc_Jv)
7638{
7639 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7640 IEMOP_HLP_MIN_386();
7641 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7642 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7643 {
7644 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7645 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7648 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7649 } IEM_MC_ELSE() {
7650 IEM_MC_ADVANCE_RIP_AND_FINISH();
7651 } IEM_MC_ENDIF();
7652 IEM_MC_END();
7653 }
7654 else
7655 {
7656 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7657 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7659 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7660 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7661 } IEM_MC_ELSE() {
7662 IEM_MC_ADVANCE_RIP_AND_FINISH();
7663 } IEM_MC_ENDIF();
7664 IEM_MC_END();
7665 }
7666}
7667
7668
7669/**
7670 * @opcode 0x83
7671 * @opfltest cf
7672 */
7673FNIEMOP_DEF(iemOp_jnc_Jv)
7674{
7675 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7676 IEMOP_HLP_MIN_386();
7677 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7678 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7679 {
7680 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7681 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7684 IEM_MC_ADVANCE_RIP_AND_FINISH();
7685 } IEM_MC_ELSE() {
7686 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7687 } IEM_MC_ENDIF();
7688 IEM_MC_END();
7689 }
7690 else
7691 {
7692 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7693 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7695 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7696 IEM_MC_ADVANCE_RIP_AND_FINISH();
7697 } IEM_MC_ELSE() {
7698 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7699 } IEM_MC_ENDIF();
7700 IEM_MC_END();
7701 }
7702}
7703
7704
7705/**
7706 * @opcode 0x84
7707 * @opfltest zf
7708 */
7709FNIEMOP_DEF(iemOp_je_Jv)
7710{
7711 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7712 IEMOP_HLP_MIN_386();
7713 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7714 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7715 {
7716 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7717 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7720 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7721 } IEM_MC_ELSE() {
7722 IEM_MC_ADVANCE_RIP_AND_FINISH();
7723 } IEM_MC_ENDIF();
7724 IEM_MC_END();
7725 }
7726 else
7727 {
7728 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7729 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7731 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7732 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7733 } IEM_MC_ELSE() {
7734 IEM_MC_ADVANCE_RIP_AND_FINISH();
7735 } IEM_MC_ENDIF();
7736 IEM_MC_END();
7737 }
7738}
7739
7740
7741/**
7742 * @opcode 0x85
7743 * @opfltest zf
7744 */
7745FNIEMOP_DEF(iemOp_jne_Jv)
7746{
7747 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7748 IEMOP_HLP_MIN_386();
7749 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7750 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7751 {
7752 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7753 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7755 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7756 IEM_MC_ADVANCE_RIP_AND_FINISH();
7757 } IEM_MC_ELSE() {
7758 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7759 } IEM_MC_ENDIF();
7760 IEM_MC_END();
7761 }
7762 else
7763 {
7764 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7765 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7767 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7768 IEM_MC_ADVANCE_RIP_AND_FINISH();
7769 } IEM_MC_ELSE() {
7770 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7771 } IEM_MC_ENDIF();
7772 IEM_MC_END();
7773 }
7774}
7775
7776
7777/**
7778 * @opcode 0x86
7779 * @opfltest cf,zf
7780 */
7781FNIEMOP_DEF(iemOp_jbe_Jv)
7782{
7783 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7784 IEMOP_HLP_MIN_386();
7785 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
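    /* jbe/jna: branch taken when CF=1 or ZF=1 (unsigned below or equal). */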
7786 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7787 {
7788 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7789 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7791 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7792 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7793 } IEM_MC_ELSE() {
7794 IEM_MC_ADVANCE_RIP_AND_FINISH();
7795 } IEM_MC_ENDIF();
7796 IEM_MC_END();
7797 }
7798 else
7799 {
7800 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7801 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7803 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7804 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7805 } IEM_MC_ELSE() {
7806 IEM_MC_ADVANCE_RIP_AND_FINISH();
7807 } IEM_MC_ENDIF();
7808 IEM_MC_END();
7809 }
7810}
7811
7812
7813/**
7814 * @opcode 0x87
7815 * @opfltest cf,zf
7816 */
7817FNIEMOP_DEF(iemOp_jnbe_Jv)
7818{
7819 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7820 IEMOP_HLP_MIN_386();
7821 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7822 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7823 {
7824 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7825 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7827 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7828 IEM_MC_ADVANCE_RIP_AND_FINISH();
7829 } IEM_MC_ELSE() {
7830 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7831 } IEM_MC_ENDIF();
7832 IEM_MC_END();
7833 }
7834 else
7835 {
7836 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7837 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7839 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7840 IEM_MC_ADVANCE_RIP_AND_FINISH();
7841 } IEM_MC_ELSE() {
7842 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7843 } IEM_MC_ENDIF();
7844 IEM_MC_END();
7845 }
7846}
7847
7848
7849/**
7850 * @opcode 0x88
7851 * @opfltest sf
7852 */
7853FNIEMOP_DEF(iemOp_js_Jv)
7854{
7855 IEMOP_MNEMONIC(js_Jv, "js Jv");
7856 IEMOP_HLP_MIN_386();
7857 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7858 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7859 {
7860 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7861 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7863 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7864 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7865 } IEM_MC_ELSE() {
7866 IEM_MC_ADVANCE_RIP_AND_FINISH();
7867 } IEM_MC_ENDIF();
7868 IEM_MC_END();
7869 }
7870 else
7871 {
7872 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7873 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7875 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7876 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7877 } IEM_MC_ELSE() {
7878 IEM_MC_ADVANCE_RIP_AND_FINISH();
7879 } IEM_MC_ENDIF();
7880 IEM_MC_END();
7881 }
7882}
7883
7884
7885/**
7886 * @opcode 0x89
7887 * @opfltest sf
7888 */
7889FNIEMOP_DEF(iemOp_jns_Jv)
7890{
7891 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7892 IEMOP_HLP_MIN_386();
7893 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7894 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7895 {
7896 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7897 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7900 IEM_MC_ADVANCE_RIP_AND_FINISH();
7901 } IEM_MC_ELSE() {
7902 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7903 } IEM_MC_ENDIF();
7904 IEM_MC_END();
7905 }
7906 else
7907 {
7908 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7909 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7911 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7912 IEM_MC_ADVANCE_RIP_AND_FINISH();
7913 } IEM_MC_ELSE() {
7914 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7915 } IEM_MC_ENDIF();
7916 IEM_MC_END();
7917 }
7918}
7919
7920
7921/**
7922 * @opcode 0x8a
7923 * @opfltest pf
7924 */
7925FNIEMOP_DEF(iemOp_jp_Jv)
7926{
7927 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7928 IEMOP_HLP_MIN_386();
7929 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7930 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7931 {
7932 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7933 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7936 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7937 } IEM_MC_ELSE() {
7938 IEM_MC_ADVANCE_RIP_AND_FINISH();
7939 } IEM_MC_ENDIF();
7940 IEM_MC_END();
7941 }
7942 else
7943 {
7944 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7945 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7947 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7948 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7949 } IEM_MC_ELSE() {
7950 IEM_MC_ADVANCE_RIP_AND_FINISH();
7951 } IEM_MC_ENDIF();
7952 IEM_MC_END();
7953 }
7954}
7955
7956
7957/**
7958 * @opcode 0x8b
7959 * @opfltest pf
7960 */
7961FNIEMOP_DEF(iemOp_jnp_Jv)
7962{
7963 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7964 IEMOP_HLP_MIN_386();
7965 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7966 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7967 {
7968 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7969 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7971 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7972 IEM_MC_ADVANCE_RIP_AND_FINISH();
7973 } IEM_MC_ELSE() {
7974 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7975 } IEM_MC_ENDIF();
7976 IEM_MC_END();
7977 }
7978 else
7979 {
7980 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7981 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7983 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7984 IEM_MC_ADVANCE_RIP_AND_FINISH();
7985 } IEM_MC_ELSE() {
7986 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7987 } IEM_MC_ENDIF();
7988 IEM_MC_END();
7989 }
7990}
7991
7992
7993/**
7994 * @opcode 0x8c
7995 * @opfltest sf,of
7996 */
7997FNIEMOP_DEF(iemOp_jl_Jv)
7998{
7999 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8000 IEMOP_HLP_MIN_386();
8001 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
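    /* jl/jnge: branch taken when SF != OF (signed less than). */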
8002 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8003 {
8004 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8005 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8007 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8008 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8009 } IEM_MC_ELSE() {
8010 IEM_MC_ADVANCE_RIP_AND_FINISH();
8011 } IEM_MC_ENDIF();
8012 IEM_MC_END();
8013 }
8014 else
8015 {
8016 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8017 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8019 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8020 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8021 } IEM_MC_ELSE() {
8022 IEM_MC_ADVANCE_RIP_AND_FINISH();
8023 } IEM_MC_ENDIF();
8024 IEM_MC_END();
8025 }
8026}
8027
8028
8029/**
8030 * @opcode 0x8d
8031 * @opfltest sf,of
8032 */
8033FNIEMOP_DEF(iemOp_jnl_Jv)
8034{
8035 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8036 IEMOP_HLP_MIN_386();
8037 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8038 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8039 {
8040 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8041 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8043 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8044 IEM_MC_ADVANCE_RIP_AND_FINISH();
8045 } IEM_MC_ELSE() {
8046 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8047 } IEM_MC_ENDIF();
8048 IEM_MC_END();
8049 }
8050 else
8051 {
8052 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8053 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8055 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8056 IEM_MC_ADVANCE_RIP_AND_FINISH();
8057 } IEM_MC_ELSE() {
8058 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8059 } IEM_MC_ENDIF();
8060 IEM_MC_END();
8061 }
8062}
8063
8064
8065/**
8066 * @opcode 0x8e
8067 * @opfltest zf,sf,of
8068 */
8069FNIEMOP_DEF(iemOp_jle_Jv)
8070{
8071 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8072 IEMOP_HLP_MIN_386();
8073 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
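    /* jle/jng: branch taken when ZF=1 or SF != OF (signed less or equal). */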
8074 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8075 {
8076 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8077 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8079 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8080 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8081 } IEM_MC_ELSE() {
8082 IEM_MC_ADVANCE_RIP_AND_FINISH();
8083 } IEM_MC_ENDIF();
8084 IEM_MC_END();
8085 }
8086 else
8087 {
8088 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8089 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8091 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8092 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8093 } IEM_MC_ELSE() {
8094 IEM_MC_ADVANCE_RIP_AND_FINISH();
8095 } IEM_MC_ENDIF();
8096 IEM_MC_END();
8097 }
8098}
8099
8100
8101/**
8102 * @opcode 0x8f
8103 * @opfltest zf,sf,of
8104 */
8105FNIEMOP_DEF(iemOp_jnle_Jv)
8106{
8107 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8108 IEMOP_HLP_MIN_386();
8109 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8110 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8111 {
8112 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8113 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8115 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8116 IEM_MC_ADVANCE_RIP_AND_FINISH();
8117 } IEM_MC_ELSE() {
8118 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8119 } IEM_MC_ENDIF();
8120 IEM_MC_END();
8121 }
8122 else
8123 {
8124 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8125 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8127 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8128 IEM_MC_ADVANCE_RIP_AND_FINISH();
8129 } IEM_MC_ELSE() {
8130 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8131 } IEM_MC_ENDIF();
8132 IEM_MC_END();
8133 }
8134}
8135
8136
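/*
 * The 0x0f 0x90..0x9f opcodes are SETcc Eb: the destination byte is set to 1
 * when the EFLAGS condition holds and to 0 otherwise, using the same
 * conditions as the Jcc forms above (e.g. 0f 94 c0 is sete al).
 */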
8137/**
8138 * @opcode 0x90
8139 * @opfltest of
8140 */
8141FNIEMOP_DEF(iemOp_seto_Eb)
8142{
8143 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8144 IEMOP_HLP_MIN_386();
8145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8146
8147 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8148 * any way. AMD says it's "unused", whatever that means. We're
8149 * ignoring for now. */
8150 if (IEM_IS_MODRM_REG_MODE(bRm))
8151 {
8152 /* register target */
8153 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8155 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8156 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8157 } IEM_MC_ELSE() {
8158 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8159 } IEM_MC_ENDIF();
8160 IEM_MC_ADVANCE_RIP_AND_FINISH();
8161 IEM_MC_END();
8162 }
8163 else
8164 {
8165 /* memory target */
8166 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8170 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8171 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8172 } IEM_MC_ELSE() {
8173 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8174 } IEM_MC_ENDIF();
8175 IEM_MC_ADVANCE_RIP_AND_FINISH();
8176 IEM_MC_END();
8177 }
8178}
8179
8180
8181/**
8182 * @opcode 0x91
8183 * @opfltest of
8184 */
8185FNIEMOP_DEF(iemOp_setno_Eb)
8186{
8187 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8188 IEMOP_HLP_MIN_386();
8189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8190
8191 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8192 * any way. AMD says it's "unused", whatever that means. We're
8193 * ignoring for now. */
8194 if (IEM_IS_MODRM_REG_MODE(bRm))
8195 {
8196 /* register target */
8197 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8199 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8200 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8201 } IEM_MC_ELSE() {
8202 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8203 } IEM_MC_ENDIF();
8204 IEM_MC_ADVANCE_RIP_AND_FINISH();
8205 IEM_MC_END();
8206 }
8207 else
8208 {
8209 /* memory target */
8210 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8214 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8215 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8216 } IEM_MC_ELSE() {
8217 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8218 } IEM_MC_ENDIF();
8219 IEM_MC_ADVANCE_RIP_AND_FINISH();
8220 IEM_MC_END();
8221 }
8222}
8223
8224
8225/**
8226 * @opcode 0x92
8227 * @opfltest cf
8228 */
8229FNIEMOP_DEF(iemOp_setc_Eb)
8230{
8231 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8232 IEMOP_HLP_MIN_386();
8233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8234
8235 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8236 * any way. AMD says it's "unused", whatever that means. We're
8237 * ignoring for now. */
8238 if (IEM_IS_MODRM_REG_MODE(bRm))
8239 {
8240 /* register target */
8241 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8243 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8244 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8245 } IEM_MC_ELSE() {
8246 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8247 } IEM_MC_ENDIF();
8248 IEM_MC_ADVANCE_RIP_AND_FINISH();
8249 IEM_MC_END();
8250 }
8251 else
8252 {
8253 /* memory target */
8254 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8258 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8259 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8260 } IEM_MC_ELSE() {
8261 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8262 } IEM_MC_ENDIF();
8263 IEM_MC_ADVANCE_RIP_AND_FINISH();
8264 IEM_MC_END();
8265 }
8266}
8267
8268
8269/**
8270 * @opcode 0x93
8271 * @opfltest cf
8272 */
8273FNIEMOP_DEF(iemOp_setnc_Eb)
8274{
8275 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8276 IEMOP_HLP_MIN_386();
8277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8278
8279 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8280 * any way. AMD says it's "unused", whatever that means. We're
8281 * ignoring for now. */
8282 if (IEM_IS_MODRM_REG_MODE(bRm))
8283 {
8284 /* register target */
8285 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8287 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8288 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8289 } IEM_MC_ELSE() {
8290 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8291 } IEM_MC_ENDIF();
8292 IEM_MC_ADVANCE_RIP_AND_FINISH();
8293 IEM_MC_END();
8294 }
8295 else
8296 {
8297 /* memory target */
8298 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8302 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8303 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8304 } IEM_MC_ELSE() {
8305 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8306 } IEM_MC_ENDIF();
8307 IEM_MC_ADVANCE_RIP_AND_FINISH();
8308 IEM_MC_END();
8309 }
8310}
8311
8312
8313/**
8314 * @opcode 0x94
8315 * @opfltest zf
8316 */
8317FNIEMOP_DEF(iemOp_sete_Eb)
8318{
8319 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8320 IEMOP_HLP_MIN_386();
8321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8322
8323 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8324 * any way. AMD says it's "unused", whatever that means. We're
8325 * ignoring for now. */
8326 if (IEM_IS_MODRM_REG_MODE(bRm))
8327 {
8328 /* register target */
8329 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8331 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8332 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8333 } IEM_MC_ELSE() {
8334 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8335 } IEM_MC_ENDIF();
8336 IEM_MC_ADVANCE_RIP_AND_FINISH();
8337 IEM_MC_END();
8338 }
8339 else
8340 {
8341 /* memory target */
8342 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8346 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8347 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8348 } IEM_MC_ELSE() {
8349 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8350 } IEM_MC_ENDIF();
8351 IEM_MC_ADVANCE_RIP_AND_FINISH();
8352 IEM_MC_END();
8353 }
8354}
8355
8356
8357/**
8358 * @opcode 0x95
8359 * @opfltest zf
8360 */
8361FNIEMOP_DEF(iemOp_setne_Eb)
8362{
8363 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8364 IEMOP_HLP_MIN_386();
8365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8366
8367 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8368 * any way. AMD says it's "unused", whatever that means. We're
8369 * ignoring for now. */
8370 if (IEM_IS_MODRM_REG_MODE(bRm))
8371 {
8372 /* register target */
8373 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8375 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8376 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8377 } IEM_MC_ELSE() {
8378 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8379 } IEM_MC_ENDIF();
8380 IEM_MC_ADVANCE_RIP_AND_FINISH();
8381 IEM_MC_END();
8382 }
8383 else
8384 {
8385 /* memory target */
8386 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8390 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8391 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8392 } IEM_MC_ELSE() {
8393 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8394 } IEM_MC_ENDIF();
8395 IEM_MC_ADVANCE_RIP_AND_FINISH();
8396 IEM_MC_END();
8397 }
8398}
8399
8400
8401/**
8402 * @opcode 0x96
8403 * @opfltest cf,zf
8404 */
8405FNIEMOP_DEF(iemOp_setbe_Eb)
8406{
8407 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8408 IEMOP_HLP_MIN_386();
8409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8410
8411 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8412 * any way. AMD says it's "unused", whatever that means. We're
8413 * ignoring for now. */
8414 if (IEM_IS_MODRM_REG_MODE(bRm))
8415 {
8416 /* register target */
8417 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8419 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8420 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8421 } IEM_MC_ELSE() {
8422 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8423 } IEM_MC_ENDIF();
8424 IEM_MC_ADVANCE_RIP_AND_FINISH();
8425 IEM_MC_END();
8426 }
8427 else
8428 {
8429 /* memory target */
8430 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8434 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8435 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8436 } IEM_MC_ELSE() {
8437 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8438 } IEM_MC_ENDIF();
8439 IEM_MC_ADVANCE_RIP_AND_FINISH();
8440 IEM_MC_END();
8441 }
8442}
8443
8444
8445/**
8446 * @opcode 0x97
8447 * @opfltest cf,zf
8448 */
8449FNIEMOP_DEF(iemOp_setnbe_Eb)
8450{
8451 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8452 IEMOP_HLP_MIN_386();
8453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8454
8455 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8456 * any way. AMD says it's "unused", whatever that means. We're
8457 * ignoring for now. */
8458 if (IEM_IS_MODRM_REG_MODE(bRm))
8459 {
8460 /* register target */
8461 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8463 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8464 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8465 } IEM_MC_ELSE() {
8466 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8467 } IEM_MC_ENDIF();
8468 IEM_MC_ADVANCE_RIP_AND_FINISH();
8469 IEM_MC_END();
8470 }
8471 else
8472 {
8473 /* memory target */
8474 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8478 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8479 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8480 } IEM_MC_ELSE() {
8481 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8482 } IEM_MC_ENDIF();
8483 IEM_MC_ADVANCE_RIP_AND_FINISH();
8484 IEM_MC_END();
8485 }
8486}
8487
8488
8489/**
8490 * @opcode 0x98
8491 * @opfltest sf
8492 */
8493FNIEMOP_DEF(iemOp_sets_Eb)
8494{
8495 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8496 IEMOP_HLP_MIN_386();
8497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8498
8499 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8500 * any way. AMD says it's "unused", whatever that means. We're
8501 * ignoring for now. */
8502 if (IEM_IS_MODRM_REG_MODE(bRm))
8503 {
8504 /* register target */
8505 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8507 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8508 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8509 } IEM_MC_ELSE() {
8510 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8511 } IEM_MC_ENDIF();
8512 IEM_MC_ADVANCE_RIP_AND_FINISH();
8513 IEM_MC_END();
8514 }
8515 else
8516 {
8517 /* memory target */
8518 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8522 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8523 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8524 } IEM_MC_ELSE() {
8525 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8526 } IEM_MC_ENDIF();
8527 IEM_MC_ADVANCE_RIP_AND_FINISH();
8528 IEM_MC_END();
8529 }
8530}
8531
8532
8533/**
8534 * @opcode 0x99
8535 * @opfltest sf
8536 */
8537FNIEMOP_DEF(iemOp_setns_Eb)
8538{
8539 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8540 IEMOP_HLP_MIN_386();
8541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8542
8543 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8544 * any way. AMD says it's "unused", whatever that means. We're
8545 * ignoring for now. */
8546 if (IEM_IS_MODRM_REG_MODE(bRm))
8547 {
8548 /* register target */
8549 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8551 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8552 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8553 } IEM_MC_ELSE() {
8554 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8555 } IEM_MC_ENDIF();
8556 IEM_MC_ADVANCE_RIP_AND_FINISH();
8557 IEM_MC_END();
8558 }
8559 else
8560 {
8561 /* memory target */
8562 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8566 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8567 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8568 } IEM_MC_ELSE() {
8569 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8570 } IEM_MC_ENDIF();
8571 IEM_MC_ADVANCE_RIP_AND_FINISH();
8572 IEM_MC_END();
8573 }
8574}
8575
8576
8577/**
8578 * @opcode 0x9a
8579 * @opfltest pf
8580 */
8581FNIEMOP_DEF(iemOp_setp_Eb)
8582{
8583 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8584 IEMOP_HLP_MIN_386();
8585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8586
8587 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8588 * any way. AMD says it's "unused", whatever that means. We're
8589 * ignoring for now. */
8590 if (IEM_IS_MODRM_REG_MODE(bRm))
8591 {
8592 /* register target */
8593 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8595 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8596 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8597 } IEM_MC_ELSE() {
8598 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8599 } IEM_MC_ENDIF();
8600 IEM_MC_ADVANCE_RIP_AND_FINISH();
8601 IEM_MC_END();
8602 }
8603 else
8604 {
8605 /* memory target */
8606 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8610 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8611 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8612 } IEM_MC_ELSE() {
8613 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8614 } IEM_MC_ENDIF();
8615 IEM_MC_ADVANCE_RIP_AND_FINISH();
8616 IEM_MC_END();
8617 }
8618}
8619
8620
8621/**
8622 * @opcode 0x9b
8623 * @opfltest pf
8624 */
8625FNIEMOP_DEF(iemOp_setnp_Eb)
8626{
8627 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8628 IEMOP_HLP_MIN_386();
8629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8630
8631 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8632 * any way. AMD says it's "unused", whatever that means. We're
8633 * ignoring for now. */
8634 if (IEM_IS_MODRM_REG_MODE(bRm))
8635 {
8636 /* register target */
8637 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8639 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8640 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8641 } IEM_MC_ELSE() {
8642 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8643 } IEM_MC_ENDIF();
8644 IEM_MC_ADVANCE_RIP_AND_FINISH();
8645 IEM_MC_END();
8646 }
8647 else
8648 {
8649 /* memory target */
8650 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8654 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8655 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8656 } IEM_MC_ELSE() {
8657 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8658 } IEM_MC_ENDIF();
8659 IEM_MC_ADVANCE_RIP_AND_FINISH();
8660 IEM_MC_END();
8661 }
8662}
8663
8664
8665/**
8666 * @opcode 0x9c
8667 * @opfltest sf,of
8668 */
8669FNIEMOP_DEF(iemOp_setl_Eb)
8670{
8671 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8672 IEMOP_HLP_MIN_386();
8673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8674
8675 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8676 * any way. AMD says it's "unused", whatever that means. We're
8677 * ignoring for now. */
8678 if (IEM_IS_MODRM_REG_MODE(bRm))
8679 {
8680 /* register target */
8681 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8683 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8684 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8685 } IEM_MC_ELSE() {
8686 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8687 } IEM_MC_ENDIF();
8688 IEM_MC_ADVANCE_RIP_AND_FINISH();
8689 IEM_MC_END();
8690 }
8691 else
8692 {
8693 /* memory target */
8694 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8698 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8699 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8700 } IEM_MC_ELSE() {
8701 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8702 } IEM_MC_ENDIF();
8703 IEM_MC_ADVANCE_RIP_AND_FINISH();
8704 IEM_MC_END();
8705 }
8706}
8707
8708
8709/**
8710 * @opcode 0x9d
8711 * @opfltest sf,of
8712 */
8713FNIEMOP_DEF(iemOp_setnl_Eb)
8714{
8715 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8716 IEMOP_HLP_MIN_386();
8717 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8718
8719 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8720 * any way. AMD says it's "unused", whatever that means. We're
8721 * ignoring for now. */
8722 if (IEM_IS_MODRM_REG_MODE(bRm))
8723 {
8724 /* register target */
8725 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8727 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8728 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8729 } IEM_MC_ELSE() {
8730 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8731 } IEM_MC_ENDIF();
8732 IEM_MC_ADVANCE_RIP_AND_FINISH();
8733 IEM_MC_END();
8734 }
8735 else
8736 {
8737 /* memory target */
8738 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8742 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8743 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8744 } IEM_MC_ELSE() {
8745 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8746 } IEM_MC_ENDIF();
8747 IEM_MC_ADVANCE_RIP_AND_FINISH();
8748 IEM_MC_END();
8749 }
8750}
8751
8752
8753/**
8754 * @opcode 0x9e
8755 * @opfltest zf,sf,of
8756 */
8757FNIEMOP_DEF(iemOp_setle_Eb)
8758{
8759 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8760 IEMOP_HLP_MIN_386();
8761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8762
8763 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8764 * any way. AMD says it's "unused", whatever that means. We're
8765 * ignoring for now. */
8766 if (IEM_IS_MODRM_REG_MODE(bRm))
8767 {
8768 /* register target */
8769 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8771 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8772 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8773 } IEM_MC_ELSE() {
8774 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8775 } IEM_MC_ENDIF();
8776 IEM_MC_ADVANCE_RIP_AND_FINISH();
8777 IEM_MC_END();
8778 }
8779 else
8780 {
8781 /* memory target */
8782 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8786 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8787 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8788 } IEM_MC_ELSE() {
8789 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8790 } IEM_MC_ENDIF();
8791 IEM_MC_ADVANCE_RIP_AND_FINISH();
8792 IEM_MC_END();
8793 }
8794}
8795
8796
8797/**
8798 * @opcode 0x9f
8799 * @opfltest zf,sf,of
8800 */
8801FNIEMOP_DEF(iemOp_setnle_Eb)
8802{
8803 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8804 IEMOP_HLP_MIN_386();
8805 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8806
8807 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8808 * any way. AMD says it's "unused", whatever that means. We're
8809 * ignoring for now. */
8810 if (IEM_IS_MODRM_REG_MODE(bRm))
8811 {
8812 /* register target */
8813 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8815 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8816 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8817 } IEM_MC_ELSE() {
8818 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8819 } IEM_MC_ENDIF();
8820 IEM_MC_ADVANCE_RIP_AND_FINISH();
8821 IEM_MC_END();
8822 }
8823 else
8824 {
8825 /* memory target */
8826 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8830 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8831 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8832 } IEM_MC_ELSE() {
8833 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8834 } IEM_MC_ENDIF();
8835 IEM_MC_ADVANCE_RIP_AND_FINISH();
8836 IEM_MC_END();
8837 }
8838}
8839
8840
8841/** Opcode 0x0f 0xa0. */
8842FNIEMOP_DEF(iemOp_push_fs)
8843{
8844 IEMOP_MNEMONIC(push_fs, "push fs");
8845 IEMOP_HLP_MIN_386();
8846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8847 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8848}
8849
8850
8851/** Opcode 0x0f 0xa1. */
8852FNIEMOP_DEF(iemOp_pop_fs)
8853{
8854 IEMOP_MNEMONIC(pop_fs, "pop fs");
8855 IEMOP_HLP_MIN_386();
8856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8857 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
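    /* The mask below names the guest registers the helper may modify, so the
       native recompiler can flush any shadow copies of them. */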
8858 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8859 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8860 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8861 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8862 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8863 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8864 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8865}
8866
8867
8868/** Opcode 0x0f 0xa2. */
8869FNIEMOP_DEF(iemOp_cpuid)
8870{
8871 IEMOP_MNEMONIC(cpuid, "cpuid");
8872 IEMOP_HLP_MIN_486(); /* not all 486es. */
8873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8874 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8875 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8876 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8877 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8878 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8879 iemCImpl_cpuid);
8880}
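
/*
 * Note: EAX, EBX, ECX and EDX are all listed as clobbered above because CPUID
 * rewrites all four unconditionally.  The IEM_CIMPL_F_VMEXIT flag is needed
 * because CPUID is intercepted under hardware virtualization (unconditionally
 * so on VT-x), so executing it may have to exit to the VMM.
 */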
8881
8882
8883/**
8884 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8885 * iemOp_bts_Ev_Gv.
8886 */
8887
8888#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8890 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8891 \
8892 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8893 { \
8894 /* register destination. */ \
8895 switch (pVCpu->iem.s.enmEffOpSize) \
8896 { \
8897 case IEMMODE_16BIT: \
8898 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8900 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8901 IEM_MC_ARG(uint16_t, u16Src, 1); \
8902 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8903 \
8904 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8905 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8906 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8907 IEM_MC_REF_EFLAGS(pEFlags); \
8908 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8909 \
8910 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8911 IEM_MC_END(); \
8912 break; \
8913 \
8914 case IEMMODE_32BIT: \
8915 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8917 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8918 IEM_MC_ARG(uint32_t, u32Src, 1); \
8919 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8920 \
8921 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8922 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8923 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8924 IEM_MC_REF_EFLAGS(pEFlags); \
8925 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
8926 \
8927 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8928 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8929 IEM_MC_END(); \
8930 break; \
8931 \
8932 case IEMMODE_64BIT: \
8933 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8935 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8936 IEM_MC_ARG(uint64_t, u64Src, 1); \
8937 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8938 \
8939 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8940 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8941 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8942 IEM_MC_REF_EFLAGS(pEFlags); \
8943 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
8944 \
8945 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8946 IEM_MC_END(); \
8947 break; \
8948 \
8949 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8950 } \
8951 } \
8952 else \
8953 { \
8954 /* memory destination. */ \
8955 /** @todo test negative bit offsets! */ \
8956 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8957 { \
8958 switch (pVCpu->iem.s.enmEffOpSize) \
8959 { \
8960 case IEMMODE_16BIT: \
8961 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8964 IEMOP_HLP_DONE_DECODING(); \
8965 \
8966 IEM_MC_ARG(uint16_t, u16Src, 1); \
8967 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8968 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8969 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8970 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8971 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8972 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8973 \
8974 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8975 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8976 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8977 \
8978 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8979 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8980 \
8981 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8982 IEM_MC_COMMIT_EFLAGS(EFlags); \
8983 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8984 IEM_MC_END(); \
8985 break; \
8986 \
8987 case IEMMODE_32BIT: \
8988 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8991 IEMOP_HLP_DONE_DECODING(); \
8992 \
8993 IEM_MC_ARG(uint32_t, u32Src, 1); \
8994 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8995 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8996 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8997 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8998 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8999 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9000 \
9001 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9002 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9003 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9004 \
9005 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9006 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9007 \
9008 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9009 IEM_MC_COMMIT_EFLAGS(EFlags); \
9010 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9011 IEM_MC_END(); \
9012 break; \
9013 \
9014 case IEMMODE_64BIT: \
9015 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9018 IEMOP_HLP_DONE_DECODING(); \
9019 \
9020 IEM_MC_ARG(uint64_t, u64Src, 1); \
9021 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9022 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9023 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9024 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9025 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9026 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9027 \
9028 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9029 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9030 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9031 \
9032 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9033 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9034 \
9035 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9036 IEM_MC_COMMIT_EFLAGS(EFlags); \
9037 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9038 IEM_MC_END(); \
9039 break; \
9040 \
9041 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9042 } \
9043 } \
9044 else \
9045 { \
9046 (void)0
9047/* Separate macro to work around a parsing issue in IEMAllInstPython.py; must follow IEMOP_BODY_BIT_Ev_Gv_RW, which ends inside its LOCK 'else' branch. */
9048#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9049 switch (pVCpu->iem.s.enmEffOpSize) \
9050 { \
9051 case IEMMODE_16BIT: \
9052 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9055 IEMOP_HLP_DONE_DECODING(); \
9056 \
9057 IEM_MC_ARG(uint16_t, u16Src, 1); \
9058 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9059 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9060 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9061 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9062 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9063 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9064 \
9065 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9066 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9067 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9068 \
9069 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9070 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9071 \
9072 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9073 IEM_MC_COMMIT_EFLAGS(EFlags); \
9074 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9075 IEM_MC_END(); \
9076 break; \
9077 \
9078 case IEMMODE_32BIT: \
9079 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9082 IEMOP_HLP_DONE_DECODING(); \
9083 \
9084 IEM_MC_ARG(uint32_t, u32Src, 1); \
9085 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9086 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9087 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9088 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9089 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9090 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9091 \
9092 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9093 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9094 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9095 \
9096 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9097 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9098 \
9099 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9100 IEM_MC_COMMIT_EFLAGS(EFlags); \
9101 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9102 IEM_MC_END(); \
9103 break; \
9104 \
9105 case IEMMODE_64BIT: \
9106 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9109 IEMOP_HLP_DONE_DECODING(); \
9110 \
9111 IEM_MC_ARG(uint64_t, u64Src, 1); \
9112 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9113 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9114 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9115 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9116 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9117 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9118 \
9119 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9120 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9121 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9122 \
9123 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9124 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9125 \
9126 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9127 IEM_MC_COMMIT_EFLAGS(EFlags); \
9128 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9129 IEM_MC_END(); \
9130 break; \
9131 \
9132 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9133 } \
9134 } \
9135 } \
9136 (void)0
9137
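/*
 * A worked example of the effective address adjustment performed by the
 * memory paths above (and in the read-only body below): the bit offset in Gv
 * is signed, so for the 16-bit variant the SAR(4)/SHL(1) pair computes
 *
 *     GCPtrEffDst += ((int16_t)u16Src >> 4) * sizeof(uint16_t);
 *     u16Src      &= 15;    // bit number within the addressed word
 *
 * e.g. a bit offset of -1 selects bit 15 of the word just below the original
 * effective address.  The 32-bit and 64-bit variants do the same with
 * SAR(5)/SHL(2) and SAR(6)/SHL(3), while the register forms simply mask the
 * offset to the operand width.
 */
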
9138/* Read-only version (bt). */
9139#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9141 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9142 \
9143 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9144 { \
9145 /* register destination. */ \
9146 switch (pVCpu->iem.s.enmEffOpSize) \
9147 { \
9148 case IEMMODE_16BIT: \
9149 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9151 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9152 IEM_MC_ARG(uint16_t, u16Src, 1); \
9153 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9154 \
9155 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9156 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9157 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9158 IEM_MC_REF_EFLAGS(pEFlags); \
9159 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9160 \
9161 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9162 IEM_MC_END(); \
9163 break; \
9164 \
9165 case IEMMODE_32BIT: \
9166 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9168 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9169 IEM_MC_ARG(uint32_t, u32Src, 1); \
9170 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9171 \
9172 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9173 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9174 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9175 IEM_MC_REF_EFLAGS(pEFlags); \
9176 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9177 \
9178 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9179 IEM_MC_END(); \
9180 break; \
9181 \
9182 case IEMMODE_64BIT: \
9183 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9185 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9186 IEM_MC_ARG(uint64_t, u64Src, 1); \
9187 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9188 \
9189 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9190 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9191 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9192 IEM_MC_REF_EFLAGS(pEFlags); \
9193 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9194 \
9195 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9196 IEM_MC_END(); \
9197 break; \
9198 \
9199 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9200 } \
9201 } \
9202 else \
9203 { \
9204 /* memory destination. */ \
9205 /** @todo test negative bit offsets! */ \
9206 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9207 { \
9208 switch (pVCpu->iem.s.enmEffOpSize) \
9209 { \
9210 case IEMMODE_16BIT: \
9211 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9214 IEMOP_HLP_DONE_DECODING(); \
9215 \
9216 IEM_MC_ARG(uint16_t, u16Src, 1); \
9217 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9218 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9219 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9220 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9221 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9222 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9223 \
9224 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9225 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9226 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9227 \
9228 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9229 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9230 \
9231 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9232 IEM_MC_COMMIT_EFLAGS(EFlags); \
9233 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9234 IEM_MC_END(); \
9235 break; \
9236 \
9237 case IEMMODE_32BIT: \
9238 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9241 IEMOP_HLP_DONE_DECODING(); \
9242 \
9243 IEM_MC_ARG(uint32_t, u32Src, 1); \
9244 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9245 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9246 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9247 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9248 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9249 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9250 \
9251 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9252 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9253 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9254 \
9255 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9256 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9257 \
9258 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9259 IEM_MC_COMMIT_EFLAGS(EFlags); \
9260 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9261 IEM_MC_END(); \
9262 break; \
9263 \
9264 case IEMMODE_64BIT: \
9265 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9268 IEMOP_HLP_DONE_DECODING(); \
9269 \
9270 IEM_MC_ARG(uint64_t, u64Src, 1); \
9271 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9272 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9273 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9274 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9275 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9276 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9277 \
9278 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9279 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9280 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9281 \
9282 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9284 \
9285 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9286 IEM_MC_COMMIT_EFLAGS(EFlags); \
9287 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9288 IEM_MC_END(); \
9289 break; \
9290 \
9291 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9292 } \
9293 } \
9294 else \
9295 { \
9296 IEMOP_HLP_DONE_DECODING(); \
9297 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9298 } \
9299 } \
9300 (void)0
9301
9302
9303/**
9304 * @opcode 0xa3
9305 * @oppfx n/a
9306 * @opflclass bitmap
9307 */
9308FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9309{
9310 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9311 IEMOP_HLP_MIN_386();
9312 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9313}
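
/*
 * For reference, once the decode bodies have reduced the bit offset as
 * described above, the iemAImpl_bt_u* workers only need to do something along
 * these lines (a sketch, not the actual implementation):
 *
 *     fEFlags &= ~X86_EFL_CF;
 *     fEFlags |= (uDst >> uSrc) & 1;    // X86_EFL_CF is bit 0.
 *
 * The bts/btr/btc variants additionally set/clear/toggle that bit in uDst.
 */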
9314
9315
9316/**
9317 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9318 */
9319#define IEMOP_BODY_SHLD_SHR_Ib(a_pImplExpr) \
9320 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9321 \
9322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9324 \
9325 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9326 { \
9327 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9328 \
9329 switch (pVCpu->iem.s.enmEffOpSize) \
9330 { \
9331 case IEMMODE_16BIT: \
9332 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9334 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9335 IEM_MC_ARG(uint16_t, u16Src, 1); \
9336 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9337 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9338 \
9339 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9340 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9341 IEM_MC_REF_EFLAGS(pEFlags); \
9342 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9343 \
9344 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9345 IEM_MC_END(); \
9346 break; \
9347 \
9348 case IEMMODE_32BIT: \
9349 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9351 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9352 IEM_MC_ARG(uint32_t, u32Src, 1); \
9353 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9354 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9355 \
9356 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9357 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9358 IEM_MC_REF_EFLAGS(pEFlags); \
9359 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9360 \
9361 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9362 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9363 IEM_MC_END(); \
9364 break; \
9365 \
9366 case IEMMODE_64BIT: \
9367 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9369 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9370 IEM_MC_ARG(uint64_t, u64Src, 1); \
9371 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9372 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9373 \
9374 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9375 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9376 IEM_MC_REF_EFLAGS(pEFlags); \
9377 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9378 \
9379 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9380 IEM_MC_END(); \
9381 break; \
9382 \
9383 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9384 } \
9385 } \
9386 else \
9387 { \
9388 switch (pVCpu->iem.s.enmEffOpSize) \
9389 { \
9390 case IEMMODE_16BIT: \
9391 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9394 \
9395 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9397 \
9398 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9399 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9400 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9401 \
9402 IEM_MC_ARG(uint16_t, u16Src, 1); \
9403 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9404 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9405 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9406 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9407 \
9408 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9409 IEM_MC_COMMIT_EFLAGS(EFlags); \
9410 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9411 IEM_MC_END(); \
9412 break; \
9413 \
9414 case IEMMODE_32BIT: \
9415 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9418 \
9419 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9421 \
9422 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9423 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9424 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9425 \
9426 IEM_MC_ARG(uint32_t, u32Src, 1); \
9427 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9428 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9429 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9430 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9431 \
9432 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9433 IEM_MC_COMMIT_EFLAGS(EFlags); \
9434 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9435 IEM_MC_END(); \
9436 break; \
9437 \
9438 case IEMMODE_64BIT: \
9439 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9442 \
9443 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9445 \
9446 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9447 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9448 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9449 \
9450 IEM_MC_ARG(uint64_t, u64Src, 1); \
9451 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9452 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9453 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9454 \
9455 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9456 \
9457 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9458 IEM_MC_COMMIT_EFLAGS(EFlags); \
9459 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9460 IEM_MC_END(); \
9461 break; \
9462 \
9463 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9464 } \
9465 } (void)0
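
/*
 * The double precision shift itself happens in the pfnNormalU* workers; for
 * SHLD with a 32-bit operand and a count in the 1..31 range the result is
 * roughly (sketch, flag updates omitted):
 *
 *     uDst = (uDst << cShift) | (uSrc >> (32 - cShift));
 *
 * i.e. the vacated low bits are filled from the top of the source operand,
 * while SHRD mirrors this and fills the vacated high bits from the low end
 * of the source.
 */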
9466
9467
9468/**
9469 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9470 */
9471#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9472 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9473 \
9474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9475 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9476 \
9477 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9478 { \
9479 switch (pVCpu->iem.s.enmEffOpSize) \
9480 { \
9481 case IEMMODE_16BIT: \
9482 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9484 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9485 IEM_MC_ARG(uint16_t, u16Src, 1); \
9486 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9487 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9488 \
9489 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9490 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9491 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9492 IEM_MC_REF_EFLAGS(pEFlags); \
9493 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9494 \
9495 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9496 IEM_MC_END(); \
9497 break; \
9498 \
9499 case IEMMODE_32BIT: \
9500 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9502 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9503 IEM_MC_ARG(uint32_t, u32Src, 1); \
9504 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9505 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9506 \
9507 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9508 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9509 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9510 IEM_MC_REF_EFLAGS(pEFlags); \
9511 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9512 \
9513 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9514 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9515 IEM_MC_END(); \
9516 break; \
9517 \
9518 case IEMMODE_64BIT: \
9519 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9521 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9522 IEM_MC_ARG(uint64_t, u64Src, 1); \
9523 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9524 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9525 \
9526 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9527 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9528 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9529 IEM_MC_REF_EFLAGS(pEFlags); \
9530 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9531 \
9532 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9533 IEM_MC_END(); \
9534 break; \
9535 \
9536 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9537 } \
9538 } \
9539 else \
9540 { \
9541 switch (pVCpu->iem.s.enmEffOpSize) \
9542 { \
9543 case IEMMODE_16BIT: \
9544 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9545 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9546 IEM_MC_ARG(uint16_t, u16Src, 1); \
9547 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9549 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9550 \
9551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9553 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9554 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9555 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9556 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9557 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9558 \
9559 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9560 IEM_MC_COMMIT_EFLAGS(EFlags); \
9561 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9562 IEM_MC_END(); \
9563 break; \
9564 \
9565 case IEMMODE_32BIT: \
9566 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9567 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9568 IEM_MC_ARG(uint32_t, u32Src, 1); \
9569 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9571 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9572 \
9573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9575 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9576 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9577 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9578 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9579 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9580 \
9581 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9582 IEM_MC_COMMIT_EFLAGS(EFlags); \
9583 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9584 IEM_MC_END(); \
9585 break; \
9586 \
9587 case IEMMODE_64BIT: \
9588 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9589 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9590 IEM_MC_ARG(uint64_t, u64Src, 1); \
9591 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9593 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9594 \
9595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9597 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9598 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9599 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9600 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9601 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9602 \
9603 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9604 IEM_MC_COMMIT_EFLAGS(EFlags); \
9605 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9606 IEM_MC_END(); \
9607 break; \
9608 \
9609 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9610 } \
9611 } (void)0
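
/*
 * Unlike the Ib forms above, the shift count here is fetched from CL and
 * passed through unmodified; reducing it modulo the operand width (and coping
 * with the architecturally undefined out-of-range counts of the 16-bit
 * variant) is presumably left to the pfnNormalU* workers.
 */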
9612
9613
9614/**
9615 * @opcode 0xa4
9616 * @opflclass shift_count
9617 */
9618FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9619{
9620 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9621 IEMOP_HLP_MIN_386();
9622 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9623}
9624
9625
9626/**
9627 * @opcode 0xa5
9628 * @opflclass shift_count
9629 */
9630FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9631{
9632 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9633 IEMOP_HLP_MIN_386();
9634 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9635}
9636
9637
9638/** Opcode 0x0f 0xa8. */
9639FNIEMOP_DEF(iemOp_push_gs)
9640{
9641 IEMOP_MNEMONIC(push_gs, "push gs");
9642 IEMOP_HLP_MIN_386();
9643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9644 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9645}
9646
9647
9648/** Opcode 0x0f 0xa9. */
9649FNIEMOP_DEF(iemOp_pop_gs)
9650{
9651 IEMOP_MNEMONIC(pop_gs, "pop gs");
9652 IEMOP_HLP_MIN_386();
9653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9654 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9655 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9656 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9657 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9658 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9659 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9660 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9661 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9662}
9663
9664
9665/** Opcode 0x0f 0xaa. */
9666FNIEMOP_DEF(iemOp_rsm)
9667{
9668 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9669 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9671 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9672 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9673 iemCImpl_rsm);
9674}
9675
9676
9677
9678/**
9679 * @opcode 0xab
9680 * @oppfx n/a
9681 * @opflclass bitmap
9682 */
9683FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9684{
9685 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9686 IEMOP_HLP_MIN_386();
9687 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9688 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9689}
9690
9691
9692/**
9693 * @opcode 0xac
9694 * @opflclass shift_count
9695 */
9696FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9697{
9698 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9699 IEMOP_HLP_MIN_386();
9700 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9701}
9702
9703
9704/**
9705 * @opcode 0xad
9706 * @opflclass shift_count
9707 */
9708FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9709{
9710 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9711 IEMOP_HLP_MIN_386();
9712 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9713}
9714
9715
9716/** Opcode 0x0f 0xae mem/0. */
9717FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9718{
9719 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9720 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9721 IEMOP_RAISE_INVALID_OPCODE_RET();
9722
9723 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9724 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9727 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9728 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9729 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9730 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9731 IEM_MC_END();
9732}
9733
9734
9735/** Opcode 0x0f 0xae mem/1. */
9736FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9737{
9738 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9739 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9740 IEMOP_RAISE_INVALID_OPCODE_RET();
9741
9742 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9743 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9746 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9747 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9748 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9749 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9750 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9751 IEM_MC_END();
9752}
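
/*
 * Note: fxsave/fxrstor operate on the architectural 512-byte, 16-byte aligned
 * FXSAVE image (hence the m512 in the mnemonics).  MXCSR is part of that
 * image (byte offset 24), which is one reason fxrstor must actualize the
 * FPU/SSE state for change rather than just for reading.
 */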
9753
9754
9755/**
9756 * @opmaps grp15
9757 * @opcode !11/2
9758 * @oppfx none
9759 * @opcpuid sse
9760 * @opgroup og_sse_mxcsrsm
9761 * @opxcpttype 5
9762 * @optest op1=0 -> mxcsr=0
9763 * @optest op1=0x2083 -> mxcsr=0x2083
9764 * @optest op1=0xfffffffe -> value.xcpt=0xd
9765 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9766 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9767 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9768 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9769 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9770 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9771 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9772 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9773 */
9774FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9775{
9776 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9777 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9778 IEMOP_RAISE_INVALID_OPCODE_RET();
9779
9780 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9781 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9784 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR. */
9785 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9786 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9787 IEM_MC_END();
9788}
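
/*
 * As the op1=0xfffffffe test line above documents, loading MXCSR with any
 * reserved bit set raises #GP(0).  The check done by iemCImpl_ldmxcsr is
 * presumably equivalent to this sketch:
 *
 *     if (uNewMxCsr & ~pVCpu->cpum.GstCtx.XState.x87.MXCSR_MASK)
 *         return iemRaiseGeneralProtectionFault0(pVCpu);
 */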
9789
9790
9791/**
9792 * @opmaps grp15
9793 * @opcode !11/3
9794 * @oppfx none
9795 * @opcpuid sse
9796 * @opgroup og_sse_mxcsrsm
9797 * @opxcpttype 5
9798 * @optest mxcsr=0 -> op1=0
9799 * @optest mxcsr=0x2083 -> op1=0x2083
9800 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9801 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9802 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9803 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9804 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9805 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9806 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9807 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9808 */
9809FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9810{
9811 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9812 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9813 IEMOP_RAISE_INVALID_OPCODE_RET();
9814
9815 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9816 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9819 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9820 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9821 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9822 IEM_MC_END();
9823}
9824
9825
9826/**
9827 * @opmaps grp15
9828 * @opcode !11/4
9829 * @oppfx none
9830 * @opcpuid xsave
9831 * @opgroup og_system
9832 * @opxcpttype none
9833 */
9834FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9835{
9836 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9837 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9838 IEMOP_RAISE_INVALID_OPCODE_RET();
9839
9840 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9841 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9844 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9845 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9846 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9847 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9848 IEM_MC_END();
9849}
9850
9851
9852/**
9853 * @opmaps grp15
9854 * @opcode !11/5
9855 * @oppfx none
9856 * @opcpuid xsave
9857 * @opgroup og_system
9858 * @opxcpttype none
9859 */
9860FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9861{
9862 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9863 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9864 IEMOP_RAISE_INVALID_OPCODE_RET();
9865
9866 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9867 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9870 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Loads the state, same as fxrstor above. */
9871 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9872 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9873 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9874 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9875 IEM_MC_END();
9876}
9877
9878/** Opcode 0x0f 0xae mem/6. */
9879FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9880
9881/**
9882 * @opmaps grp15
9883 * @opcode !11/7
9884 * @oppfx none
9885 * @opcpuid clfsh
9886 * @opgroup og_cachectl
9887 * @optest op1=1 ->
9888 */
9889FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9890{
9891 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9892 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9893 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9894
9895 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9896 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9899 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9900 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9901 IEM_MC_END();
9902}
9903
9904/**
9905 * @opmaps grp15
9906 * @opcode !11/7
9907 * @oppfx 0x66
9908 * @opcpuid clflushopt
9909 * @opgroup og_cachectl
9910 * @optest op1=1 ->
9911 */
9912FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9913{
9914 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9915 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9916 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9917
9918 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9919 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9922 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9923 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9924 IEM_MC_END();
9925}
9926
9927
9928/** Opcode 0x0f 0xae 11b/5. */
9929FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9930{
9931 RT_NOREF_PV(bRm);
9932 IEMOP_MNEMONIC(lfence, "lfence");
9933 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9935#ifdef RT_ARCH_ARM64
9936 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9937#else
9938 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9939 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9940 else
9941 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9942#endif
9943 IEM_MC_ADVANCE_RIP_AND_FINISH();
9944 IEM_MC_END();
9945}
9946
9947
9948/** Opcode 0x0f 0xae 11b/6. */
9949FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9950{
9951 RT_NOREF_PV(bRm);
9952 IEMOP_MNEMONIC(mfence, "mfence");
9953 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9955#ifdef RT_ARCH_ARM64
9956 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9957#else
9958 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9959 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9960 else
9961 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9962#endif
9963 IEM_MC_ADVANCE_RIP_AND_FINISH();
9964 IEM_MC_END();
9965}
9966
9967
9968/** Opcode 0x0f 0xae 11b/7. */
9969FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9970{
9971 RT_NOREF_PV(bRm);
9972 IEMOP_MNEMONIC(sfence, "sfence");
9973 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9975#ifdef RT_ARCH_ARM64
9976 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9977#else
9978 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9979 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9980 else
9981 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9982#endif
9983 IEM_MC_ADVANCE_RIP_AND_FINISH();
9984 IEM_MC_END();
9985}
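
/*
 * Note on the three fence workers above: IEM performs guest memory accesses
 * directly on the host, so the guest-visible ordering guarantee must be
 * established with a real host barrier.  On hosts without SSE2 the
 * iemAImpl_alt_mem_fence fallback presumably uses a locked read-modify-write
 * (the classic pre-SSE2 'lock add [esp], 0' style idiom), which acts as a
 * full barrier on x86.
 */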
9986
9987
9988/** Opcode 0xf3 0x0f 0xae 11b/0. */
9989FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9990{
9991 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9992 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9993 {
9994 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9996 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9997 IEM_MC_LOCAL(uint64_t, u64Dst);
9998 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9999 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10000 IEM_MC_ADVANCE_RIP_AND_FINISH();
10001 IEM_MC_END();
10002 }
10003 else
10004 {
10005 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10007 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10008 IEM_MC_LOCAL(uint32_t, u32Dst);
10009 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10010 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10011 IEM_MC_ADVANCE_RIP_AND_FINISH();
10012 IEM_MC_END();
10013 }
10014}
10015
10016
10017/** Opcode 0xf3 0x0f 0xae 11b/1. */
10018FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10019{
10020 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10021 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10022 {
10023 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10025 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10026 IEM_MC_LOCAL(uint64_t, u64Dst);
10027 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10028 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10029 IEM_MC_ADVANCE_RIP_AND_FINISH();
10030 IEM_MC_END();
10031 }
10032 else
10033 {
10034 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10036 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10037 IEM_MC_LOCAL(uint32_t, u32Dst);
10038 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10039 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10040 IEM_MC_ADVANCE_RIP_AND_FINISH();
10041 IEM_MC_END();
10042 }
10043}
10044
10045
10046/** Opcode 0xf3 0x0f 0xae 11b/2. */
10047FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10048{
10049 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10050 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10051 {
10052 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10054 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10055 IEM_MC_LOCAL(uint64_t, u64Dst);
10056 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10057 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10058 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10059 IEM_MC_ADVANCE_RIP_AND_FINISH();
10060 IEM_MC_END();
10061 }
10062 else
10063 {
10064 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10066 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10067 IEM_MC_LOCAL(uint32_t, u32Dst);
10068 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10069 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10070 IEM_MC_ADVANCE_RIP_AND_FINISH();
10071 IEM_MC_END();
10072 }
10073}
10074
10075
10076/** Opcode 0xf3 0x0f 0xae 11b/3. */
10077FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10078{
10079 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10080 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10081 {
10082 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10084 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10085 IEM_MC_LOCAL(uint64_t, u64Dst);
10086 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10087 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10088 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10089 IEM_MC_ADVANCE_RIP_AND_FINISH();
10090 IEM_MC_END();
10091 }
10092 else
10093 {
10094 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10096 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10097 IEM_MC_LOCAL(uint32_t, u32Dst);
10098 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10099 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10100 IEM_MC_ADVANCE_RIP_AND_FINISH();
10101 IEM_MC_END();
10102 }
10103}
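
/*
 * Common to the rdfsbase/rdgsbase/wrfsbase/wrgsbase workers above:
 * IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT supplies the architectural #UD checks
 * (64-bit mode and CR4.FSGSBASE=1 required), and the 64-bit write forms add
 * a canonical address check since loading a non-canonical base raises #GP(0).
 */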
10104
10105
10106/**
10107 * Group 15 jump table for register variant.
10108 */
10109IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10110{ /* pfx: none, 066h, 0f3h, 0f2h */
10111 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10112 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10113 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10114 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10115 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10116 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10117 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10118 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10119};
10120AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10121
10122
10123/**
10124 * Group 15 jump table for memory variant.
10125 */
10126IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10127{ /* pfx: none, 066h, 0f3h, 0f2h */
10128 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10129 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10130 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10131 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10132 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10133 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10134 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10135 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10136};
10137AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
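
/*
 * Dispatch example: f3 0f ae /0 with a register operand lands on
 * iemOp_Grp15_rdfsbase because the lookup below computes
 * g_apfnGroup15RegReg[/r * 4 + idxPrefix], where idxPrefix follows the table
 * columns: 0 for no prefix, 1 for 066h, 2 for 0f3h and 3 for 0f2h.
 */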
10138
10139
10140/** Opcode 0x0f 0xae. */
10141FNIEMOP_DEF(iemOp_Grp15)
10142{
10143 IEMOP_HLP_MIN_586(); /* Not entirely accurate, nor strictly needed, but useful for debugging 286 code. */
10144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10145 if (IEM_IS_MODRM_REG_MODE(bRm))
10146 /* register, register */
10147 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10148 + pVCpu->iem.s.idxPrefix], bRm);
10149 /* memory, register */
10150 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10151 + pVCpu->iem.s.idxPrefix], bRm);
10152}
10153
10154
10155/**
10156 * @opcode 0xaf
10157 * @opflclass multiply
10158 */
10159FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10160{
10161 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10162 IEMOP_HLP_MIN_386();
10163 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10164 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10166 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10167}
10168
10169
10170/**
10171 * @opcode 0xb0
10172 * @opflclass arithmetic
10173 */
10174FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10175{
10176 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10177 IEMOP_HLP_MIN_486();
10178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10179
10180 if (IEM_IS_MODRM_REG_MODE(bRm))
10181 {
10182 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10184 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10185 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10186 IEM_MC_ARG(uint8_t, u8Src, 2);
10187 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10188
10189 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10190 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10191 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10192 IEM_MC_REF_EFLAGS(pEFlags);
10193 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10194
10195 IEM_MC_ADVANCE_RIP_AND_FINISH();
10196 IEM_MC_END();
10197 }
10198 else
10199 {
10200#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10201 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10204 IEMOP_HLP_DONE_DECODING(); \
10205 \
10206 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10207 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10208 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10209 \
10210 IEM_MC_ARG(uint8_t, u8Src, 2); \
10211 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10212 \
10213 IEM_MC_LOCAL(uint8_t, u8Al); \
10214 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10215 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10216 \
10217 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10218 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10219 \
10220 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10221 IEM_MC_COMMIT_EFLAGS(EFlags); \
10222 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10223 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10224 IEM_MC_END()
10225
10226 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10227 {
10228 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10229 }
10230 else
10231 {
10232 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10233 }
10234 }
10235}
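
/*
 * Architectural operation of cmpxchg, for reference (the flags are set as by
 * a CMP of the accumulator against the destination):
 *
 *     if (uAccum == uDst) { ZF = 1; uDst   = uSrc; }
 *     else                { ZF = 0; uAccum = uDst; }
 *
 * When LOCKed, the destination is written in both cases (with its own value
 * on failure), which is why the memory body maps it RW/ATOMIC regardless of
 * the expected outcome.
 */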
10236
10237/**
10238 * @opcode 0xb1
10239 * @opflclass arithmetic
10240 */
10241FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10242{
10243 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10244 IEMOP_HLP_MIN_486();
10245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10246
10247 if (IEM_IS_MODRM_REG_MODE(bRm))
10248 {
10249 switch (pVCpu->iem.s.enmEffOpSize)
10250 {
10251 case IEMMODE_16BIT:
10252 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10254 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10255 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10256 IEM_MC_ARG(uint16_t, u16Src, 2);
10257 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10258
10259 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10260 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10261 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10262 IEM_MC_REF_EFLAGS(pEFlags);
10263 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10264
10265 IEM_MC_ADVANCE_RIP_AND_FINISH();
10266 IEM_MC_END();
10267 break;
10268
10269 case IEMMODE_32BIT:
10270 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10272 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10273 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10274 IEM_MC_ARG(uint32_t, u32Src, 2);
10275 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10276
10277 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10278 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10279 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10280 IEM_MC_REF_EFLAGS(pEFlags);
10281 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10282
10283 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10284 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10285 } IEM_MC_ELSE() {
10286 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10287 } IEM_MC_ENDIF();
10288
10289 IEM_MC_ADVANCE_RIP_AND_FINISH();
10290 IEM_MC_END();
10291 break;
10292
10293 case IEMMODE_64BIT:
10294 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10296 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10297 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10298 IEM_MC_ARG(uint64_t, u64Src, 2);
10299 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10300
10301 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10302 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10303 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10304 IEM_MC_REF_EFLAGS(pEFlags);
10305 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10306
10307 IEM_MC_ADVANCE_RIP_AND_FINISH();
10308 IEM_MC_END();
10309 break;
10310
10311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10312 }
10313 }
10314 else
10315 {
10316#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10317 do { \
10318 switch (pVCpu->iem.s.enmEffOpSize) \
10319 { \
10320 case IEMMODE_16BIT: \
10321 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10322 \
10323 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10326 IEMOP_HLP_DONE_DECODING(); \
10327 \
10328 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10329 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10330 \
10331 IEM_MC_ARG(uint16_t, u16Src, 2); \
10332 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10333 \
10334 IEM_MC_LOCAL(uint16_t, u16Ax); \
10335 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10336 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10337 \
10338 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10339 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10340 \
10341 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10342 IEM_MC_COMMIT_EFLAGS(EFlags); \
10343 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10344 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10345 IEM_MC_END(); \
10346 break; \
10347 \
10348 case IEMMODE_32BIT: \
10349 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10352 IEMOP_HLP_DONE_DECODING(); \
10353 \
10354 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10355 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10356 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10357 \
10358 IEM_MC_ARG(uint32_t, u32Src, 2); \
10359 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10360 \
10361 IEM_MC_LOCAL(uint32_t, u32Eax); \
10362 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10363 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10364 \
10365 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10366 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10367 \
10368 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10369 IEM_MC_COMMIT_EFLAGS(EFlags); \
10370 \
10371 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10372 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10373 } IEM_MC_ENDIF(); \
10374 \
10375 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10376 IEM_MC_END(); \
10377 break; \
10378 \
10379 case IEMMODE_64BIT: \
10380 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10383 IEMOP_HLP_DONE_DECODING(); \
10384 \
10385 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10386 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10387 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10388 \
10389 IEM_MC_ARG(uint64_t, u64Src, 2); \
10390 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10391 \
10392 IEM_MC_LOCAL(uint64_t, u64Rax); \
10393 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10394 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10395 \
10396 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10397 \
10398 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10399 \
10400 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10401 IEM_MC_COMMIT_EFLAGS(EFlags); \
10402 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10403 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10404 IEM_MC_END(); \
10405 break; \
10406 \
10407 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10408 } \
10409 } while (0)
10410
10411 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10412 {
10413 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10414 }
10415 else
10416 {
10417 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10418 }
10419 }
10420}
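
/*
 * Note the 32-bit operand size gymnastics above: writing a 32-bit GPR must
 * zero-extend into its upper half, but cmpxchg only architecturally writes
 * the register the operation actually touched.  Hence the register variant
 * clears the high half of the destination on success and of RAX on failure,
 * while the memory variant only stores EAX back when the comparison failed,
 * leaving the upper half of RAX untouched on success.
 */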
10421
10422
10423/** Opcode 0x0f 0xb2. */
10424FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10425{
10426 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10427 IEMOP_HLP_MIN_386();
10428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10429 if (IEM_IS_MODRM_REG_MODE(bRm))
10430 IEMOP_RAISE_INVALID_OPCODE_RET();
10431 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10432}
10433
10434
10435/**
10436 * @opcode 0xb3
10437 * @oppfx n/a
10438 * @opflclass bitmap
10439 */
10440FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10441{
10442 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10443 IEMOP_HLP_MIN_386();
10444 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10445 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10446}
10447
10448
10449/** Opcode 0x0f 0xb4. */
10450FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10451{
10452 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10453 IEMOP_HLP_MIN_386();
10454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10455 if (IEM_IS_MODRM_REG_MODE(bRm))
10456 IEMOP_RAISE_INVALID_OPCODE_RET();
10457 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10458}
10459
10460
10461/** Opcode 0x0f 0xb5. */
10462FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10463{
10464 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10465 IEMOP_HLP_MIN_386();
10466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10467 if (IEM_IS_MODRM_REG_MODE(bRm))
10468 IEMOP_RAISE_INVALID_OPCODE_RET();
10469 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10470}
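
/* The lss/lfs/lgs forms above load a far pointer from memory: the offset
   part is stored into the Gv register and the selector part is loaded into
   the respective segment register (via the common worker). A register
   operand makes no sense for a far pointer, hence the #UD in the
   register-mode case. */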
10471
10472
10473/** Opcode 0x0f 0xb6. */
10474FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10475{
10476 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10477 IEMOP_HLP_MIN_386();
10478
10479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10480
10481 /*
10482 * If rm is denoting a register, no more instruction bytes.
10483 */
10484 if (IEM_IS_MODRM_REG_MODE(bRm))
10485 {
10486 switch (pVCpu->iem.s.enmEffOpSize)
10487 {
10488 case IEMMODE_16BIT:
10489 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10491 IEM_MC_LOCAL(uint16_t, u16Value);
10492 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10493 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10494 IEM_MC_ADVANCE_RIP_AND_FINISH();
10495 IEM_MC_END();
10496 break;
10497
10498 case IEMMODE_32BIT:
10499 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10501 IEM_MC_LOCAL(uint32_t, u32Value);
10502 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10503 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10504 IEM_MC_ADVANCE_RIP_AND_FINISH();
10505 IEM_MC_END();
10506 break;
10507
10508 case IEMMODE_64BIT:
10509 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10511 IEM_MC_LOCAL(uint64_t, u64Value);
10512 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10513 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10514 IEM_MC_ADVANCE_RIP_AND_FINISH();
10515 IEM_MC_END();
10516 break;
10517
10518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10519 }
10520 }
10521 else
10522 {
10523 /*
10524 * We're loading a register from memory.
10525 */
10526 switch (pVCpu->iem.s.enmEffOpSize)
10527 {
10528 case IEMMODE_16BIT:
10529 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10530 IEM_MC_LOCAL(uint16_t, u16Value);
10531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10534 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10535 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10536 IEM_MC_ADVANCE_RIP_AND_FINISH();
10537 IEM_MC_END();
10538 break;
10539
10540 case IEMMODE_32BIT:
10541 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10542 IEM_MC_LOCAL(uint32_t, u32Value);
10543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10546 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10547 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10548 IEM_MC_ADVANCE_RIP_AND_FINISH();
10549 IEM_MC_END();
10550 break;
10551
10552 case IEMMODE_64BIT:
10553 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10554 IEM_MC_LOCAL(uint64_t, u64Value);
10555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10558 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10559 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10560 IEM_MC_ADVANCE_RIP_AND_FINISH();
10561 IEM_MC_END();
10562 break;
10563
10564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10565 }
10566 }
10567}
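
/* Example: movzx eax, al with AL=0x80 yields EAX=0x00000080; as with any
   32-bit GPR write, the store also zero-extends into the upper half of RAX
   in 64-bit mode. */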
10568
10569
10570/** Opcode 0x0f 0xb7. */
10571FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10572{
10573 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10574 IEMOP_HLP_MIN_386();
10575
10576 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10577
10578 /** @todo Not entirely sure how the operand size prefix is handled here,
10579 * assuming that it will be ignored. Would be nice to have a few
10580 * tests for this. */
10581
10582 /** @todo There should be no difference in the behaviour whether REX.W is
10583 * present or not... */
10584
10585 /*
10586 * If rm is denoting a register, no more instruction bytes.
10587 */
10588 if (IEM_IS_MODRM_REG_MODE(bRm))
10589 {
10590 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10591 {
10592 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10594 IEM_MC_LOCAL(uint32_t, u32Value);
10595 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10596 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10597 IEM_MC_ADVANCE_RIP_AND_FINISH();
10598 IEM_MC_END();
10599 }
10600 else
10601 {
10602 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10604 IEM_MC_LOCAL(uint64_t, u64Value);
10605 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10606 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10607 IEM_MC_ADVANCE_RIP_AND_FINISH();
10608 IEM_MC_END();
10609 }
10610 }
10611 else
10612 {
10613 /*
10614 * We're loading a register from memory.
10615 */
10616 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10617 {
10618 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10619 IEM_MC_LOCAL(uint32_t, u32Value);
10620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10623 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10624 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10625 IEM_MC_ADVANCE_RIP_AND_FINISH();
10626 IEM_MC_END();
10627 }
10628 else
10629 {
10630 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10631 IEM_MC_LOCAL(uint64_t, u64Value);
10632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10635 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10636 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10637 IEM_MC_ADVANCE_RIP_AND_FINISH();
10638 IEM_MC_END();
10639 }
10640 }
10641}
10642
10643
10644/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10645FNIEMOP_UD_STUB(iemOp_jmpe);
10646
10647
10648/**
10649 * @opcode 0xb8
10650 * @oppfx 0xf3
10651 * @opflmodify cf,pf,af,zf,sf,of
10652 * @opflclear cf,pf,af,sf,of
10653 */
10654FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10655{
10656 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10657 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10658 return iemOp_InvalidNeedRM(pVCpu);
10659#ifndef TST_IEM_CHECK_MC
10660# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10661 static const IEMOPBINSIZES s_Native =
10662 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10663# endif
10664 static const IEMOPBINSIZES s_Fallback =
10665 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10666#endif
10667 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10669 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10670}
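
/* POPCNT stores the number of set bits in the source, e.g. popcnt eax, ebx
   with EBX=0x000000F1 yields EAX=5. ZF reflects a zero source while CF, PF,
   AF, SF and OF are cleared (see the flag annotations above). The
   host-or-fallback selection presumably picks the assembly helper when the
   host CPU itself has POPCNT and the C fallback otherwise. */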
10671
10672
10673/**
10674 * @opcode 0xb9
10675 * @opinvalid intel-modrm
10676 * @optest ->
10677 */
10678FNIEMOP_DEF(iemOp_Grp10)
10679{
10680 /*
10693 * AMD does not decode beyond the 0xb9 opcode, whereas intel decodes the
10694 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10683 */
10684 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10685 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10686 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10687}
10688
10689
10690/**
10691 * Body for group 8 bit instruction.
10692 */
10693#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10694 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10695 \
10696 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10697 { \
10698 /* register destination. */ \
10699 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10700 \
10701 switch (pVCpu->iem.s.enmEffOpSize) \
10702 { \
10703 case IEMMODE_16BIT: \
10704 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10706 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10707 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10708 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10709 \
10710 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10711 IEM_MC_REF_EFLAGS(pEFlags); \
10712 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10713 \
10714 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10715 IEM_MC_END(); \
10716 break; \
10717 \
10718 case IEMMODE_32BIT: \
10719 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10721 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10722 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10723 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10724 \
10725 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10726 IEM_MC_REF_EFLAGS(pEFlags); \
10727 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10728 \
10729 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10730 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10731 IEM_MC_END(); \
10732 break; \
10733 \
10734 case IEMMODE_64BIT: \
10735 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10737 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10738 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10739 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10740 \
10741 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10742 IEM_MC_REF_EFLAGS(pEFlags); \
10743 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10744 \
10745 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10746 IEM_MC_END(); \
10747 break; \
10748 \
10749 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10750 } \
10751 } \
10752 else \
10753 { \
10754 /* memory destination. */ \
10755 /** @todo test negative bit offsets! */ \
10756 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10757 { \
10758 switch (pVCpu->iem.s.enmEffOpSize) \
10759 { \
10760 case IEMMODE_16BIT: \
10761 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10764 \
10765 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10766 IEMOP_HLP_DONE_DECODING(); \
10767 \
10768 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10769 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10770 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10771 \
10772 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10773 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10774 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10775 \
10776 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10777 IEM_MC_COMMIT_EFLAGS(EFlags); \
10778 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10779 IEM_MC_END(); \
10780 break; \
10781 \
10782 case IEMMODE_32BIT: \
10783 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10786 \
10787 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10788 IEMOP_HLP_DONE_DECODING(); \
10789 \
10790 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10791 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10792 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10793 \
10794 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10795 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10796 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10797 \
10798 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10799 IEM_MC_COMMIT_EFLAGS(EFlags); \
10800 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10801 IEM_MC_END(); \
10802 break; \
10803 \
10804 case IEMMODE_64BIT: \
10805 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10808 \
10809 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10810 IEMOP_HLP_DONE_DECODING(); \
10811 \
10812 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10813 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10814 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10815 \
10816 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10817 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10818 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10819 \
10820 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10821 IEM_MC_COMMIT_EFLAGS(EFlags); \
10822 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10823 IEM_MC_END(); \
10824 break; \
10825 \
10826 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10827 } \
10828 } \
10829 else \
10830 { \
10831 (void)0
10832/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10833#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10834 switch (pVCpu->iem.s.enmEffOpSize) \
10835 { \
10836 case IEMMODE_16BIT: \
10837 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10840 \
10841 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10842 IEMOP_HLP_DONE_DECODING(); \
10843 \
10844 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10845 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10846 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10847 \
10848 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10849 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10850 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10851 \
10852 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10853 IEM_MC_COMMIT_EFLAGS(EFlags); \
10854 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10855 IEM_MC_END(); \
10856 break; \
10857 \
10858 case IEMMODE_32BIT: \
10859 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10862 \
10863 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10864 IEMOP_HLP_DONE_DECODING(); \
10865 \
10866 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10867 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10868 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10869 \
10870 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10871 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10872 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10873 \
10874 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10875 IEM_MC_COMMIT_EFLAGS(EFlags); \
10876 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10877 IEM_MC_END(); \
10878 break; \
10879 \
10880 case IEMMODE_64BIT: \
10881 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10884 \
10885 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10886 IEMOP_HLP_DONE_DECODING(); \
10887 \
10888 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10889 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10890 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10891 \
10892 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10893 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10894 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10895 \
10896 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10897 IEM_MC_COMMIT_EFLAGS(EFlags); \
10898 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10899 IEM_MC_END(); \
10900 break; \
10901 \
10902 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10903 } \
10904 } \
10905 } \
10906 (void)0
10907
10908/* Read-only version (bt) */
10909#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10910 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10911 \
10912 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10913 { \
10914 /* register destination. */ \
10915 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10916 \
10917 switch (pVCpu->iem.s.enmEffOpSize) \
10918 { \
10919 case IEMMODE_16BIT: \
10920 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10922 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10923 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10924 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10925 \
10926 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10927 IEM_MC_REF_EFLAGS(pEFlags); \
10928 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10929 \
10930 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10931 IEM_MC_END(); \
10932 break; \
10933 \
10934 case IEMMODE_32BIT: \
10935 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10937 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
10938 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10939 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10940 \
10941 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10942 IEM_MC_REF_EFLAGS(pEFlags); \
10943 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10944 \
10945 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10946 IEM_MC_END(); \
10947 break; \
10948 \
10949 case IEMMODE_64BIT: \
10950 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10952 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
10953 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10954 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10955 \
10956 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10957 IEM_MC_REF_EFLAGS(pEFlags); \
10958 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10959 \
10960 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10961 IEM_MC_END(); \
10962 break; \
10963 \
10964 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10965 } \
10966 } \
10967 else \
10968 { \
10969 /* memory destination. */ \
10970 /** @todo test negative bit offsets! */ \
10971 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10972 { \
10973 switch (pVCpu->iem.s.enmEffOpSize) \
10974 { \
10975 case IEMMODE_16BIT: \
10976 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10979 \
10980 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10981 IEMOP_HLP_DONE_DECODING(); \
10982 \
10983 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10984 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10985 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10986 \
10987 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10988 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10989 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10990 \
10991 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10992 IEM_MC_COMMIT_EFLAGS(EFlags); \
10993 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10994 IEM_MC_END(); \
10995 break; \
10996 \
10997 case IEMMODE_32BIT: \
10998 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11001 \
11002 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11003 IEMOP_HLP_DONE_DECODING(); \
11004 \
11005 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11006 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11007 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11008 \
11009 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11010 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11011 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11012 \
11013 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11014 IEM_MC_COMMIT_EFLAGS(EFlags); \
11015 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11016 IEM_MC_END(); \
11017 break; \
11018 \
11019 case IEMMODE_64BIT: \
11020 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11023 \
11024 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11025 IEMOP_HLP_DONE_DECODING(); \
11026 \
11027 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11028 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11029 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11030 \
11031 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11032 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11033 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11034 \
11035 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11036 IEM_MC_COMMIT_EFLAGS(EFlags); \
11037 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11038 IEM_MC_END(); \
11039 break; \
11040 \
11041 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11042 } \
11043 } \
11044 else \
11045 { \
11046 IEMOP_HLP_DONE_DECODING(); \
11047 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11048 } \
11049 } \
11050 (void)0
11051
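/* Note that the Ev,Ib bodies above mask the immediate bit offset to the
   operand width (bImm & 0x0f/0x1f/0x3f), so e.g. 'bt word [mem], 17' tests
   bit 17 & 15 = bit 1. This differs from the Ev,Gv forms, where a register
   bit offset used with a memory operand can address bits outside the
   referenced word/dword/qword. */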
11052
11053/**
11054 * @opmaps grp8
11055 * @opcode /4
11056 * @oppfx n/a
11057 * @opflclass bitmap
11058 */
11059FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11060{
11061 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11062 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11063}
11064
11065
11066/**
11067 * @opmaps grp8
11068 * @opcode /5
11069 * @oppfx n/a
11070 * @opflclass bitmap
11071 */
11072FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11073{
11074 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11075 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11076 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11077}
11078
11079
11080/**
11081 * @opmaps grp8
11082 * @opcode /6
11083 * @oppfx n/a
11084 * @opflclass bitmap
11085 */
11086FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11087{
11088 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11089 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11090 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11091}
11092
11093
11094/**
11095 * @opmaps grp8
11096 * @opcode /7
11097 * @oppfx n/a
11098 * @opflclass bitmap
11099 */
11100FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11101{
11102 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11103 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11104 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11105}
11106
11107
11108/** Opcode 0x0f 0xba. */
11109FNIEMOP_DEF(iemOp_Grp8)
11110{
11111 IEMOP_HLP_MIN_386();
11112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11113 switch (IEM_GET_MODRM_REG_8(bRm))
11114 {
11115 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11116 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11117 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11118 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11119
11120 case 0: case 1: case 2: case 3:
11121 /* Both AMD and Intel want full modr/m decoding and imm8. */
11122 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11123
11124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11125 }
11126}
11127
11128
11129/**
11130 * @opcode 0xbb
11131 * @oppfx n/a
11132 * @opflclass bitmap
11133 */
11134FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11135{
11136 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11137 IEMOP_HLP_MIN_386();
11138 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11139 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11140}
11141
11142
11143/**
11144 * Body for BSF and BSR instructions.
11145 *
11146 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11147 * the destination register, which means that for 32-bit operations the high
11148 * bits must be left alone.
11149 *
11150 * @param pImpl Pointer to the instruction implementation (assembly).
11151 */
11152#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11154 \
11155 /* \
11156 * If rm is denoting a register, no more instruction bytes. \
11157 */ \
11158 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11159 { \
11160 switch (pVCpu->iem.s.enmEffOpSize) \
11161 { \
11162 case IEMMODE_16BIT: \
11163 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11165 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11166 IEM_MC_ARG(uint16_t, u16Src, 1); \
11167 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11168 \
11169 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11170 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11171 IEM_MC_REF_EFLAGS(pEFlags); \
11172 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); \
11173 \
11174 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11175 IEM_MC_END(); \
11176 break; \
11177 \
11178 case IEMMODE_32BIT: \
11179 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11181 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11182 IEM_MC_ARG(uint32_t, u32Src, 1); \
11183 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11184 \
11185 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11186 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11187 IEM_MC_REF_EFLAGS(pEFlags); \
11188 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); \
11189 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11190 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11191 } IEM_MC_ENDIF(); \
11192 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11193 IEM_MC_END(); \
11194 break; \
11195 \
11196 case IEMMODE_64BIT: \
11197 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11199 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11200 IEM_MC_ARG(uint64_t, u64Src, 1); \
11201 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11202 \
11203 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11204 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11205 IEM_MC_REF_EFLAGS(pEFlags); \
11206 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); \
11207 \
11208 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11209 IEM_MC_END(); \
11210 break; \
11211 \
11212 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11213 } \
11214 } \
11215 else \
11216 { \
11217 /* \
11218 * We're accessing memory. \
11219 */ \
11220 switch (pVCpu->iem.s.enmEffOpSize) \
11221 { \
11222 case IEMMODE_16BIT: \
11223 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11224 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11225 IEM_MC_ARG(uint16_t, u16Src, 1); \
11226 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11228 \
11229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11231 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11232 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11233 IEM_MC_REF_EFLAGS(pEFlags); \
11234 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); \
11235 \
11236 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11237 IEM_MC_END(); \
11238 break; \
11239 \
11240 case IEMMODE_32BIT: \
11241 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11242 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11243 IEM_MC_ARG(uint32_t, u32Src, 1); \
11244 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11246 \
11247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11249 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11250 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11251 IEM_MC_REF_EFLAGS(pEFlags); \
11252 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); \
11253 \
11254 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11255 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11256 } IEM_MC_ENDIF(); \
11257 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11258 IEM_MC_END(); \
11259 break; \
11260 \
11261 case IEMMODE_64BIT: \
11262 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11263 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11264 IEM_MC_ARG(uint64_t, u64Src, 1); \
11265 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11267 \
11268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11270 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11271 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11272 IEM_MC_REF_EFLAGS(pEFlags); \
11273 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); \
11274 \
11275 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11276 IEM_MC_END(); \
11277 break; \
11278 \
11279 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11280 } \
11281 } (void)0
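
/* For example, with a source of 0x12 (binary 10010) bsf stores 1 (lowest set
   bit) and bsr stores 4 (highest set bit). With a zero source ZF is set and,
   as implemented here, the destination is left entirely untouched, including
   the upper dword in 64-bit mode for 32-bit operands; this is why the
   high-dword clearing above is conditional on ZF. */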
11282
11283
11284/**
11285 * @opcode 0xbc
11286 * @oppfx !0xf3
11287 * @opfltest cf,pf,af,sf,of
11288 * @opflmodify cf,pf,af,zf,sf,of
11289 * @opflundef cf,pf,af,sf,of
11290 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11291 * document them as inputs. Sigh.
11292 */
11293FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11294{
11295 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11296 IEMOP_HLP_MIN_386();
11297 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11298 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11299 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11300}
11301
11302
11303/**
11304 * @opcode 0xbc
11305 * @oppfx 0xf3
11306 * @opfltest pf,af,sf,of
11307 * @opflmodify cf,pf,af,zf,sf,of
11308 * @opflundef pf,af,sf,of
11309 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11310 * document them as inputs. Sigh.
11311 */
11312FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11313{
11314 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11315 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11316 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11317
11318#ifndef TST_IEM_CHECK_MC
11319 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11320 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11321 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11322 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11323 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11324 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11325 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11326 {
11327 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11328 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11329 };
11330#endif
11331 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11332 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11333 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11335 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11336}
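
/* TZCNT differs from BSF when the source is zero: it stores the operand
   width (16/32/64) and sets CF, whereas BSF sets ZF and leaves the
   destination alone. ZF is instead set when the result is zero, i.e. when
   bit 0 of the source is set. E.g. tzcnt eax, ebx with EBX=0x08 yields
   EAX=3, CF=0, ZF=0. Without BMI1 the F3 prefix is ignored and the encoding
   falls back to plain bsf, as the feature check above does. */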
11337
11338
11339/**
11340 * @opcode 0xbd
11341 * @oppfx !0xf3
11342 * @opfltest cf,pf,af,sf,of
11343 * @opflmodify cf,pf,af,zf,sf,of
11344 * @opflundef cf,pf,af,sf,of
11345 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11346 * document them as inputs. Sigh.
11347 */
11348FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11349{
11350 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11351 IEMOP_HLP_MIN_386();
11352 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11353 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11354 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11355}
11356
11357
11358/**
11359 * @opcode 0xbd
11360 * @oppfx 0xf3
11361 * @opfltest pf,af,sf,of
11362 * @opflmodify cf,pf,af,zf,sf,of
11363 * @opflundef pf,af,sf,of
11364 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11365 * document them as inputs. Sigh.
11366 */
11367FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11368{
11369 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11370 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11371 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11372
11373#ifndef TST_IEM_CHECK_MC
11374 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11375 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11376 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11377 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11378 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11379 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11380 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11381 {
11382 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11383 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11384 };
11385#endif
11386 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11387 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11388 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11390 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11391}
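
/* LZCNT behaves analogously: a zero source yields the operand width with CF
   set, otherwise the leading-zero count (lzcnt eax, ebx with EBX=1 yields
   EAX=31). Note that the decode gate here is the AMD ABM feature bit,
   whereas tzcnt above is gated on BMI1; without it the encoding decays to
   plain bsr. */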
11392
11393
11394
11395/** Opcode 0x0f 0xbe. */
11396FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11397{
11398 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11399 IEMOP_HLP_MIN_386();
11400
11401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11402
11403 /*
11404 * If rm is denoting a register, no more instruction bytes.
11405 */
11406 if (IEM_IS_MODRM_REG_MODE(bRm))
11407 {
11408 switch (pVCpu->iem.s.enmEffOpSize)
11409 {
11410 case IEMMODE_16BIT:
11411 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11413 IEM_MC_LOCAL(uint16_t, u16Value);
11414 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11415 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11416 IEM_MC_ADVANCE_RIP_AND_FINISH();
11417 IEM_MC_END();
11418 break;
11419
11420 case IEMMODE_32BIT:
11421 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11423 IEM_MC_LOCAL(uint32_t, u32Value);
11424 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11425 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11426 IEM_MC_ADVANCE_RIP_AND_FINISH();
11427 IEM_MC_END();
11428 break;
11429
11430 case IEMMODE_64BIT:
11431 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11433 IEM_MC_LOCAL(uint64_t, u64Value);
11434 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11435 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11436 IEM_MC_ADVANCE_RIP_AND_FINISH();
11437 IEM_MC_END();
11438 break;
11439
11440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11441 }
11442 }
11443 else
11444 {
11445 /*
11446 * We're loading a register from memory.
11447 */
11448 switch (pVCpu->iem.s.enmEffOpSize)
11449 {
11450 case IEMMODE_16BIT:
11451 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11452 IEM_MC_LOCAL(uint16_t, u16Value);
11453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11456 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11457 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11458 IEM_MC_ADVANCE_RIP_AND_FINISH();
11459 IEM_MC_END();
11460 break;
11461
11462 case IEMMODE_32BIT:
11463 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11464 IEM_MC_LOCAL(uint32_t, u32Value);
11465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11468 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11469 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11470 IEM_MC_ADVANCE_RIP_AND_FINISH();
11471 IEM_MC_END();
11472 break;
11473
11474 case IEMMODE_64BIT:
11475 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11476 IEM_MC_LOCAL(uint64_t, u64Value);
11477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11480 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11481 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11482 IEM_MC_ADVANCE_RIP_AND_FINISH();
11483 IEM_MC_END();
11484 break;
11485
11486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11487 }
11488 }
11489}
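
/* Sign-extending counterpart to movzx above: movsx eax, al with AL=0x80
   yields EAX=0xFFFFFF80, while AL=0x7F yields EAX=0x0000007F. */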
11490
11491
11492/** Opcode 0x0f 0xbf. */
11493FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11494{
11495 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11496 IEMOP_HLP_MIN_386();
11497
11498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11499
11500 /** @todo Not entirely sure how the operand size prefix is handled here,
11501 * assuming that it will be ignored. Would be nice to have a few
11502 * tests for this. */
11503 /*
11504 * If rm is denoting a register, no more instruction bytes.
11505 */
11506 if (IEM_IS_MODRM_REG_MODE(bRm))
11507 {
11508 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11509 {
11510 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11512 IEM_MC_LOCAL(uint32_t, u32Value);
11513 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11514 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11515 IEM_MC_ADVANCE_RIP_AND_FINISH();
11516 IEM_MC_END();
11517 }
11518 else
11519 {
11520 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11522 IEM_MC_LOCAL(uint64_t, u64Value);
11523 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11524 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11525 IEM_MC_ADVANCE_RIP_AND_FINISH();
11526 IEM_MC_END();
11527 }
11528 }
11529 else
11530 {
11531 /*
11532 * We're loading a register from memory.
11533 */
11534 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11535 {
11536 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11537 IEM_MC_LOCAL(uint32_t, u32Value);
11538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11541 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11542 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11543 IEM_MC_ADVANCE_RIP_AND_FINISH();
11544 IEM_MC_END();
11545 }
11546 else
11547 {
11548 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11549 IEM_MC_LOCAL(uint64_t, u64Value);
11550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11553 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11554 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11555 IEM_MC_ADVANCE_RIP_AND_FINISH();
11556 IEM_MC_END();
11557 }
11558 }
11559}
11560
11561
11562/**
11563 * @opcode 0xc0
11564 * @opflclass arithmetic
11565 */
11566FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11567{
11568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11569 IEMOP_HLP_MIN_486();
11570 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11571
11572 /*
11573 * If rm is denoting a register, no more instruction bytes.
11574 */
11575 if (IEM_IS_MODRM_REG_MODE(bRm))
11576 {
11577 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11579 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11580 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11581 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11582
11583 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11584 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11585 IEM_MC_REF_EFLAGS(pEFlags);
11586 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11587
11588 IEM_MC_ADVANCE_RIP_AND_FINISH();
11589 IEM_MC_END();
11590 }
11591 else
11592 {
11593 /*
11594 * We're accessing memory.
11595 */
11596#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11597 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11600 IEMOP_HLP_DONE_DECODING(); \
11601 \
11602 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11603 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11604 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11605 \
11606 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11607 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11608 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11609 \
11610 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11611 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11612 \
11613 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11614 IEM_MC_COMMIT_EFLAGS(EFlags); \
11615 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11616 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11617 IEM_MC_END()
11618 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11619 {
11620 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11621 }
11622 else
11623 {
11624 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11625 }
11626 }
11627}
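
/* XADD exchanges and adds: the destination receives dst + src and the
   register operand receives the old destination, e.g. with AL=1, BL=2
   'xadd al, bl' leaves AL=3 and BL=1. In the memory form above the worker
   updates a local copy of the register operand, which is only written back
   once the memory commit has succeeded. */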
11628
11629
11630/**
11631 * @opcode 0xc1
11632 * @opflclass arithmetic
11633 */
11634FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11635{
11636 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11637 IEMOP_HLP_MIN_486();
11638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11639
11640 /*
11641 * If rm is denoting a register, no more instruction bytes.
11642 */
11643 if (IEM_IS_MODRM_REG_MODE(bRm))
11644 {
11645 switch (pVCpu->iem.s.enmEffOpSize)
11646 {
11647 case IEMMODE_16BIT:
11648 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11650 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11651 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11652 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11653
11654 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11655 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11656 IEM_MC_REF_EFLAGS(pEFlags);
11657 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11658
11659 IEM_MC_ADVANCE_RIP_AND_FINISH();
11660 IEM_MC_END();
11661 break;
11662
11663 case IEMMODE_32BIT:
11664 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11666 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11667 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11668 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11669
11670 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11671 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11672 IEM_MC_REF_EFLAGS(pEFlags);
11673 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11674
11675 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11676 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11677 IEM_MC_ADVANCE_RIP_AND_FINISH();
11678 IEM_MC_END();
11679 break;
11680
11681 case IEMMODE_64BIT:
11682 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11684 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11685 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11686 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11687
11688 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11689 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11690 IEM_MC_REF_EFLAGS(pEFlags);
11691 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11692
11693 IEM_MC_ADVANCE_RIP_AND_FINISH();
11694 IEM_MC_END();
11695 break;
11696
11697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11698 }
11699 }
11700 else
11701 {
11702 /*
11703 * We're accessing memory.
11704 */
11705#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11706 do { \
11707 switch (pVCpu->iem.s.enmEffOpSize) \
11708 { \
11709 case IEMMODE_16BIT: \
11710 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11713 IEMOP_HLP_DONE_DECODING(); \
11714 \
11715 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11716 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11717 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11718 \
11719 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11720 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11721 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11722 \
11723 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11724 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11725 \
11726 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11727 IEM_MC_COMMIT_EFLAGS(EFlags); \
11728 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11729 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11730 IEM_MC_END(); \
11731 break; \
11732 \
11733 case IEMMODE_32BIT: \
11734 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11737 IEMOP_HLP_DONE_DECODING(); \
11738 \
11739 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11740 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11741 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11742 \
11743 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11744 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11745 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11746 \
11747 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11748 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11749 \
11750 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11751 IEM_MC_COMMIT_EFLAGS(EFlags); \
11752 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11753 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11754 IEM_MC_END(); \
11755 break; \
11756 \
11757 case IEMMODE_64BIT: \
11758 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11761 IEMOP_HLP_DONE_DECODING(); \
11762 \
11763 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11764 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11765 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11766 \
11767 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11768 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11769 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11770 \
11771 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11772 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11773 \
11774 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11775 IEM_MC_COMMIT_EFLAGS(EFlags); \
11776 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11777 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11778 IEM_MC_END(); \
11779 break; \
11780 \
11781 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11782 } \
11783 } while (0)
11784
11785 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11786 {
11787 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
11788 }
11789 else
11790 {
11791 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
11792 }
11793 }
11794}
11795
11796
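/* The cmpps/cmppd/cmpss/cmpsd family below compares according to the imm8
   predicate (0=eq, 1=lt, 2=le, 3=unord, 4=neq, 5=nlt, 6=nle, 7=ord for these
   non-VEX forms) and produces an all-ones or all-zeroes mask per element.
   The packed forms store the whole destination register, the scalar forms
   only the low dword/qword. */
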
11797/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11798FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11799{
11800 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11801
11802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11803 if (IEM_IS_MODRM_REG_MODE(bRm))
11804 {
11805 /*
11806 * XMM, XMM.
11807 */
11808 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11809 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11811 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11812 IEM_MC_LOCAL(X86XMMREG, Dst);
11813 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11814 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11815 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11816 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11817 IEM_MC_PREPARE_SSE_USAGE();
11818 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11819 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11820 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11821 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11822 } IEM_MC_ELSE() {
11823 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11824 } IEM_MC_ENDIF();
11825
11826 IEM_MC_ADVANCE_RIP_AND_FINISH();
11827 IEM_MC_END();
11828 }
11829 else
11830 {
11831 /*
11832 * XMM, [mem128].
11833 */
11834 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11835 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11836 IEM_MC_LOCAL(X86XMMREG, Dst);
11837 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11838 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11840
11841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11842 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11843 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11845 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11846 IEM_MC_PREPARE_SSE_USAGE();
11847
11848 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11849 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11850 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11851 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11852 } IEM_MC_ELSE() {
11853 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11854 } IEM_MC_ENDIF();
11855
11856 IEM_MC_ADVANCE_RIP_AND_FINISH();
11857 IEM_MC_END();
11858 }
11859}
11860
11861
11862/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11863FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11864{
11865 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11866
11867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11868 if (IEM_IS_MODRM_REG_MODE(bRm))
11869 {
11870 /*
11871 * XMM, XMM.
11872 */
11873 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11874 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11876 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11877 IEM_MC_LOCAL(X86XMMREG, Dst);
11878 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11879 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11880 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11881 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11882 IEM_MC_PREPARE_SSE_USAGE();
11883 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11884 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11885 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11886 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11887 } IEM_MC_ELSE() {
11888 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11889 } IEM_MC_ENDIF();
11890
11891 IEM_MC_ADVANCE_RIP_AND_FINISH();
11892 IEM_MC_END();
11893 }
11894 else
11895 {
11896 /*
11897 * XMM, [mem128].
11898 */
11899 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11900 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11901 IEM_MC_LOCAL(X86XMMREG, Dst);
11902 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11903 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11905
11906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11907 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11908 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11910 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11911 IEM_MC_PREPARE_SSE_USAGE();
11912
11913 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11914 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11915 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11916 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11917 } IEM_MC_ELSE() {
11918 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11919 } IEM_MC_ENDIF();
11920
11921 IEM_MC_ADVANCE_RIP_AND_FINISH();
11922 IEM_MC_END();
11923 }
11924}
11925
11926
11927/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11928FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11929{
11930 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11931
11932 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11933 if (IEM_IS_MODRM_REG_MODE(bRm))
11934 {
11935 /*
11936 * XMM32, XMM32.
11937 */
11938 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11939 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* CMPSS only requires SSE, not SSE2. */
11941 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11942 IEM_MC_LOCAL(X86XMMREG, Dst);
11943 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11944 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11945 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11946 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11947 IEM_MC_PREPARE_SSE_USAGE();
11948 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11949 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11950 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11951 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11952 } IEM_MC_ELSE() {
11953 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11954 } IEM_MC_ENDIF();
11955
11956 IEM_MC_ADVANCE_RIP_AND_FINISH();
11957 IEM_MC_END();
11958 }
11959 else
11960 {
11961 /*
11962 * XMM32, [mem32].
11963 */
11964 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11965 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11966 IEM_MC_LOCAL(X86XMMREG, Dst);
11967 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11968 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11970
11971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11972 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11973 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11975 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11976 IEM_MC_PREPARE_SSE_USAGE();
11977
11978 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11979 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11980 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11981 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11982 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11983 } IEM_MC_ELSE() {
11984 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11985 } IEM_MC_ENDIF();
11986
11987 IEM_MC_ADVANCE_RIP_AND_FINISH();
11988 IEM_MC_END();
11989 }
11990}
11991
11992
11993/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11994FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11995{
11996 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11997
11998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11999 if (IEM_IS_MODRM_REG_MODE(bRm))
12000 {
12001 /*
12002 * XMM64, XMM64.
12003 */
12004 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12005 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12007 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12008 IEM_MC_LOCAL(X86XMMREG, Dst);
12009 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
12010 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
12011 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12013 IEM_MC_PREPARE_SSE_USAGE();
12014 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12015 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
12016 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12017 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12018 } IEM_MC_ELSE() {
12019 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12020 } IEM_MC_ENDIF();
12021
12022 IEM_MC_ADVANCE_RIP_AND_FINISH();
12023 IEM_MC_END();
12024 }
12025 else
12026 {
12027 /*
12028 * XMM64, [mem64].
12029 */
12030 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12031 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12032 IEM_MC_LOCAL(X86XMMREG, Dst);
12033 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
12034 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
12035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12036
12037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12038 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12039 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12041 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12042 IEM_MC_PREPARE_SSE_USAGE();
12043
12044 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12045 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12046 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
12047 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12048 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12049 } IEM_MC_ELSE() {
12050 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12051 } IEM_MC_ENDIF();
12052
12053 IEM_MC_ADVANCE_RIP_AND_FINISH();
12054 IEM_MC_END();
12055 }
12056}
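

/* Note: For cmpss/cmpsd (and cmpps/cmppd) the imm8 selects the compare
   predicate: 0=EQ, 1=LT, 2=LE, 3=UNORD, 4=NEQ, 5=NLT, 6=NLE, 7=ORD.  The
   worker writes an all-ones mask to the destination lane when the predicate
   holds and all zeroes otherwise, e.g.:
       cmpsd xmm0, xmm1, 1    ; xmm0[63:0] = (xmm0.lo < xmm1.lo) ? ~0 : 0
   Unordered inputs (any NaN) satisfy only the UNORD/NEQ/NLT/NLE predicates. */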


/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                IEMOP_RAISE_INVALID_OPCODE_RET();

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
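

/* Note: movnti is a non-temporal store hint: the general register is written
   to memory while suggesting the caches be bypassed.  The emulation above
   simply performs an ordinary store, which is architecturally fine since the
   non-temporal property is only a performance hint. */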


/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */


/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();

        IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
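

/* Note: pinsrw replaces a single 16-bit lane of the destination and leaves
   the other lanes intact.  The MMX register has four word lanes, hence the
   'bImm & 3' masking above; the XMM register has eight, hence 'bImm & 7'.
   E.g. pinsrw xmm0, eax, 5 writes AX to xmm0[95:80]. */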


/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */


/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, MMX, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */


/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
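

/* Note: For shufps the imm8 holds four 2-bit lane selectors; the low two
   pick result dwords 0 and 1 from the destination, the high two pick result
   dwords 2 and 3 from the source:
       dst[0] = dst[imm & 3];          dst[2] = src[(imm >> 4) & 3];
       dst[1] = dst[(imm >> 2) & 3];   dst[3] = src[(imm >> 6) & 3]; */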


/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
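

/* Note: shufpd works like shufps but on two qword lanes, so only the low two
   imm8 bits matter: dst[0] = dst[imm & 1]; dst[1] = src[(imm >> 1) & 1]. */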


/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */


/**
 * @opmaps grp9
 * @opcode /1
 * @opcodesub !11 mr/reg rex.w=0
 * @oppfx n/a
 * @opflmodify zf
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
        \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
        IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
        IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
        IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
        \
        IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
        IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
        IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
        \
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
        IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
            IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        \
        IEM_MC_END()
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
    {
        IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
    }
    else
    {
        IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
    }
}
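

/* Note: cmpxchg8b compares EDX:EAX with the 64-bit memory operand; on a
   match it sets ZF and stores ECX:EBX to memory, otherwise it clears ZF and
   loads the memory value into EDX:EAX.  The ZF test after the worker call in
   the body above implements exactly that register write-back. */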


/**
 * @opmaps grp9
 * @opcode /1
 * @opcodesub !11 mr/reg rex.w=1
 * @oppfx n/a
 * @opflmodify zf
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
    {
        /*
         * This is hairy, very hairy macro fun.  We're walking a fine line
         * here to make the code parsable by IEMAllInstPython.py and fit into
         * the patterns IEMAllThrdPython.py requires for the code morphing.
         */
#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
        IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        \
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
        bUnmapInfoStmt; \
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
        IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
        IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
        \
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
        IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
        \
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)

#define BODY_CMPXCHG16B_TAIL(a_Type) \
        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
            IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()

#ifdef RT_ARCH_AMD64
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            {
                BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                BODY_CMPXCHG16B_TAIL(RW);
            }
            else
            {
                BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                BODY_CMPXCHG16B_TAIL(ATOMIC);
            }
        }
        else
        { /* (see comments in #else case below) */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
            {
                BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                BODY_CMPXCHG16B_TAIL(RW);
            }
            else
            {
                BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
                IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
                                    pEFlags, bUnmapInfo);
                IEM_MC_END();
            }
        }

#elif defined(RT_ARCH_ARM64)
        /** @todo may require fallback for unaligned accesses... */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            BODY_CMPXCHG16B_TAIL(RW);
        }
        else
        {
            BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            BODY_CMPXCHG16B_TAIL(ATOMIC);
        }

#else
        /* Note! The fallback for 32-bit systems and systems without CX16 uses
                 multiple accesses and is not at all atomic, which works fine in
                 a uni-CPU guest configuration (ignoring DMA).  If guest SMP is
                 active we have no choice but to use a rendezvous callback here.
                 Sigh. */
        if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
        {
            BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            BODY_CMPXCHG16B_TAIL(RW);
        }
        else
        {
            BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
            IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                iemCImpl_cmpxchg16b_fallback_rendezvous,
                                pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            IEM_MC_END();
            /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
        }
#endif

#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
    }
    Log(("cmpxchg16b -> #UD\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
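

/* Note: Unlike cmpxchg8b, cmpxchg16b requires a 16-byte aligned memory
   operand and raises #GP(0) otherwise, which is what the alignment check in
   the head macro above enforces.  The rendezvous fallback exists because a
   host without a native 16-byte atomic can only emulate the access with
   several smaller operations, and that is only safe while no other vCPU is
   running. */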

FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
{
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
    return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
}


/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 1);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                            RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                            iemCImpl_rdrand, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
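
/* Note: rdrand reports success via CF: CF=1 means the destination holds a
   valid random value, CF=0 tells the caller to retry; the other arithmetic
   flags are cleared.  That flag traffic is why IEM_CIMPL_F_RFLAGS is passed
   above. */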

/** Opcode 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif

/** Opcode 0x66 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif

/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 1);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                            RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                            iemCImpl_rdseed, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/**
 * Group 9 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
{   /* pfx:  none,                  066h,                   0f3h,                   0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_rdrand_Rv,  iemOp_Grp9_rdrand_Rv,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_rdseed_Rv,  iemOp_Grp9_rdseed_Rv,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);


/**
 * Group 9 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
{   /* pfx:  none,                  066h,                   0f3h,                   0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq,  iemOp_Grp9_vmxon_Mq,    iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM,    iemOp_InvalidWithRM,    iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);


/** Opcode 0x0f 0xc7. */
FNIEMOP_DEF(iemOp_Grp9)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                             + pVCpu->iem.s.idxPrefix], bRm);
}
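

/* Note: the tables above are indexed by modrm.reg times four plus the
   operand-size/repeat prefix index (none, 066h, 0f3h, 0f2h).  E.g. for
   0f c7 f6 (mod=11, reg=6, rm=6) without prefixes the dispatcher selects
   g_apfnGroup9RegReg[6 * 4 + 0], i.e. iemOp_Grp9_rdrand_Rv. */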


/**
 * Common 'bswap register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
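

/* Note: The Intel SDM leaves bswap with a 16-bit operand undefined; the u16
   worker above is handed the full 32-bit register reference (without
   clearing the high dword) so it can mimic whatever behaviour real CPUs
   exhibit for that form. */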


/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! The Intel manuals state that R8-R15 can be accessed by using a
             REX.X prefix, but REX.B appears to be the correct prefix.  For a
             parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
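

/* Note: a concrete example of the REX.B remark above: 0f c8 is bswap eax,
   while 41 0f c8 (REX.B) selects bswap r8d and 49 0f c8 (REX.W+B) selects
   bswap r8. */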


/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xca. */
FNIEMOP_DEF(iemOp_bswap_rDX_r10)
{
    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcb. */
FNIEMOP_DEF(iemOp_bswap_rBX_r11)
{
    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}


/* Opcode 0x0f 0xd0 - invalid */


/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}


/* Opcode 0xf3 0x0f 0xd0 - invalid */


/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}



/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
}

/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}

/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}


/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}


/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
}


/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}


/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */


/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
}


/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddq_u128);
}


/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
}

/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmullw_u128);
}


/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */

/**
 * @opcode 0xd6
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype none
 * @optest op1=-1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f3
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udf30fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f3
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    else
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f2
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
 * @optest op1=-42 op2=0xfedcba9876543210
 *         -> op1=0xfedcba9876543210 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udf20fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f2
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    else
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}

/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs say register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt. the high 32 bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs say register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt. the high 32 bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
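

/* Note: pmovmskb gathers the most significant bit of each packed byte into
   the low bits of the destination GPR, so the MMX form yields an 8-bit mask
   and the SSE form a 16-bit mask.  The workers write the full 64-bit GPR,
   which also takes care of the high bits mentioned in the notes above. */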


/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */


/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
}


/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
}


/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
}


/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
}


/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
}


/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
}

/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
}


/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
}


/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusb_u128);
}


/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddusw_u128);
}


/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
}

/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
}

/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */


/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
}


/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}


/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}


/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}


/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}


/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}


/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
}


/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */
/* Opcode 0x0f 0xe6 - invalid */


/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}


/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}


/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}


/**
 * @opcode 0xe7
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse1_cachect
 * @opxcpttype none
 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /**
     * @opdone
     * @opmnemonic ud0fe7reg
     * @opcode 0xe7
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/**
 * @opcode 0xe7
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_cachect
 * @opxcpttype 1
 * @optest op1=-1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660fe7reg
     * @opcode 0xe7
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
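

/* Note: like movnti above, movntq and movntdq are non-temporal store hints
   that are emulated as plain stores; movntdq additionally requires its
   128-bit memory operand to be 16-byte aligned (hence the
   IEM_MC_STORE_MEM_U128_ALIGN_SSE above), while movntq has no alignment
   requirement. */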

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
13744 */
13745 IEMOP_RAISE_INVALID_OPCODE_RET();
13746 }
13747 else
13748 {
13749 /*
13750 * Register, memory.
13751 */
13752 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13753 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13755
13756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13758 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13759 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13760 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13761 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13762
13763 IEM_MC_ADVANCE_RIP_AND_FINISH();
13764 IEM_MC_END();
13765 }
13766}
13767
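/*
 * The _NO_AC fetch above is the whole point of lddqu: a 128-bit load with
 * no 16-byte alignment fault, in contrast to the aligned access forms:
 *
 *     lddqu/movdqu:   any address is fine;
 *     movdqa/movntdq: (GCPtrEff & 15) != 0 -> #GP(0).
 */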
13768
13769/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13770FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13771{
13772 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13773 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13774}
13775
13776
13777/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13778FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13779{
13780 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13781 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13782}
13783
13784
13785/* Opcode 0xf2 0x0f 0xf1 - invalid */
13786
13787/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13788FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13789{
13790 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13791 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13792}
13793
13794
13795/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13796FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13797{
13798 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13799 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13800}
13801
13802
13803/* Opcode 0xf2 0x0f 0xf2 - invalid */
13804
13805/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13806FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13807{
13808 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13809 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13810}
13811
13812
13813/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13814FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13815{
13816 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13817 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13818}
13819
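/*
 * Count semantics sketch for the psllw/pslld/psllq family (the psrl and
 * psra rows work the same way): the entire low 64 bits of the source
 * operand form the shift count, and an out-of-range count zeroes the
 * destination (the arithmetic right shifts clamp to element-width - 1
 * instead).  E.g. for psllw:
 *
 *     if (uCount > 15)
 *         for (unsigned i = 0; i < 8; i++) au16Dst[i] = 0;
 *     else
 *         for (unsigned i = 0; i < 8; i++) au16Dst[i] = (uint16_t)(au16Dst[i] << uCount);
 */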
13820/* Opcode 0xf2 0x0f 0xf3 - invalid */
13821
13822/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13823FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13824{
13825 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13826 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_pmuludq_u64);
13827}
13828
13829
13830 /** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13831FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13832{
13833 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13834 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
13835}
13836
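/*
 * pmuludq notes: the MMX form is an SSE2 addition (like psubq below), hence
 * the _Sse2 MMX worker above.  The arithmetic zero-extends the even dwords
 * and forms full 64-bit products, roughly:
 *
 *     uDst.au64[0] = (uint64_t)uDst.au32[0] * uSrc.au32[0];
 *     uDst.au64[1] = (uint64_t)uDst.au32[2] * uSrc.au32[2]; // 128-bit form only
 */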
13837
13838/* Opcode 0xf2 0x0f 0xf4 - invalid */
13839
13840/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13841FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13842{
13843 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13844 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13845}
13846
13847
13848/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13849FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13850{
13851 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13852 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13853}
13854
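/*
 * pmaddwd arithmetic sketch: adjacent signed 16-bit products are summed
 * into each 32-bit lane; the only overflow case (all four inputs 8000h)
 * produces 80000000h, as on real hardware:
 *
 *     ai32Dst[i] = (int32_t)ai16Dst[2 * i]     * ai16Src[2 * i]
 *                + (int32_t)ai16Dst[2 * i + 1] * ai16Src[2 * i + 1];
 */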
13855/* Opcode 0xf2 0x0f 0xf5 - invalid */
13856
13857/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13858FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13859{
13860 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13861 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13862}
13863
13864
13865/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13866FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13867{
13868 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13869 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13870}
13871
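/*
 * psadbw sketch: per 8-byte group the absolute byte differences are summed
 * into the low 16 bits of the corresponding qword, upper bits zeroed:
 *
 *     uint32_t uSum = 0;
 *     for (unsigned i = 0; i < 8; i++)
 *         uSum += RT_ABS((int32_t)au8Dst[i] - (int32_t)au8Src[i]);
 *     au64Dst[0] = uSum; // au64Dst[1] likewise from bytes 8..15 in the 128-bit form
 */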
13872
13873/* Opcode 0xf2 0x0f 0xf6 - invalid */
13874
13875/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13876FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13877/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13878FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13879/* Opcode 0xf2 0x0f 0xf7 - invalid */
13880
13881
13882/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13883FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13884{
13885 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13886 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
13887}
13888
13889
13890/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13891FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13892{
13893 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13894 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubb_u128);
13895}
13896
13897
13898/* Opcode 0xf2 0x0f 0xf8 - invalid */
13899
13900
13901/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13902FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13903{
13904 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13905 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
13906}
13907
13908
13909/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13910FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13911{
13912 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13913 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubw_u128);
13914}
13915
13916
13917/* Opcode 0xf2 0x0f 0xf9 - invalid */
13918
13919
13920/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13921FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13922{
13923 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13924 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
13925}
13926
13927
13928/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13929FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13930{
13931 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13932 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubd_u128);
13933}
13934
13935
13936/* Opcode 0xf2 0x0f 0xfa - invalid */
13937
13938
13939/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13940FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13941{
13942 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13943 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13944}
13945
13946
13947/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13948FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13949{
13950 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13951 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubq_u128);
13952}
13953
13954
13955/* Opcode 0xf2 0x0f 0xfb - invalid */
13956
13957
13958/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13959FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13960{
13961 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13962 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
13963}
13964
13965
13966/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13967FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13968{
13969 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13970 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddb_u128);
13971}
13972
13973
13974/* Opcode 0xf2 0x0f 0xfc - invalid */
13975
13976
13977/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13978FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13979{
13980 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13981 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
13982}
13983
13984
13985/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13986FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13987{
13988 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13989 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddw_u128);
13990}
13991
13992
13993/* Opcode 0xf2 0x0f 0xfd - invalid */
13994
13995
13996/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13997FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13998{
13999 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14000 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
14001}
14002
14003
14004 /** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
14005FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
14006{
14007 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14008 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddd_u128);
14009}
14010
14011
14012/* Opcode 0xf2 0x0f 0xfe - invalid */
14013
14014
14015/** Opcode **** 0x0f 0xff - UD0 */
14016FNIEMOP_DEF(iemOp_ud0)
14017{
14018 IEMOP_MNEMONIC(ud0, "ud0");
14019 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
14020 {
14021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
14022 if (IEM_IS_MODRM_MEM_MODE(bRm))
14023 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
14024 }
14025 IEMOP_HLP_DONE_DECODING();
14026 IEMOP_RAISE_INVALID_OPCODE_RET();
14027}
14028
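/*
 * Length note: on Intel CPUs ud0 consumes a ModR/M byte plus any SIB and
 * displacement bytes before faulting, so e.g. 0f ff 80 11 22 33 44 decodes
 * as a seven byte instruction there, whereas AMD CPUs fault after the bare
 * two opcode bytes.  That difference is what the vendor check above models.
 */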
14029
14030
14031/**
14032 * Two byte opcode map, first byte 0x0f.
14033 *
14034 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14035 * check if it needs updating as well when making changes.
14036 */
14037const PFNIEMOP g_apfnTwoByteMap[] =
14038{
14039 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
14040 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14041 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14042 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14043 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14044 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14045 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14046 /* 0x06 */ IEMOP_X4(iemOp_clts),
14047 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14048 /* 0x08 */ IEMOP_X4(iemOp_invd),
14049 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14050 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14051 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14052 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14053 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14054 /* 0x0e */ IEMOP_X4(iemOp_femms),
14055 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14056
14057 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14058 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14059 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14060 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14061 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14062 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14063 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14064 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14065 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14066 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14067 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14068 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14069 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14070 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14071 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14072 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14073
14074 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14075 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14076 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14077 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14078 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14079 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14080 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14081 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14082 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14083 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14084 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14085 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14086 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14087 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14088 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14089 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14090
14091 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14092 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14093 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14094 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14095 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14096 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14097 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14098 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14099 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14100 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14101 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14102 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14103 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14104 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14105 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14106 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14107
14108 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14109 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14110 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14111 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14112 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14113 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14114 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14115 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14116 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14117 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14118 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14119 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14120 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14121 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14122 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14123 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14124
14125 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14126 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14127 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14128 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14129 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14130 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14131 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14132 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14133 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14134 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14135 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14136 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14137 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14138 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14139 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14140 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14141
14142 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14143 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14144 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14145 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14146 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14147 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14148 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14149 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14150 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14151 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14152 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14153 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14154 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14155 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14156 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14157 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14158
14159 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14160 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14161 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14162 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14163 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14164 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14165 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14166 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14167
14168 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14169 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14170 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14171 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14172 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14173 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14174 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14175 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14176
14177 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14178 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14179 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14180 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14181 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14182 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14183 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14184 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14185 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14186 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14187 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14188 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14189 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14190 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14191 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14192 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14193
14194 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14195 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14196 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14197 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14198 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14199 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14200 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14201 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14202 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14203 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14204 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14205 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14206 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14207 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14208 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14209 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14210
14211 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14212 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14213 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14214 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14215 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14216 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14217 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14218 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14219 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14220 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14221 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14222 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14223 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14224 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14225 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14226 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14227
14228 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14229 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14230 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14231 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14232 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14233 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14234 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14235 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14236 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14237 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14238 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14239 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14240 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14241 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14242 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14243 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14244
14245 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14246 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14247 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14248 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14249 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14250 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14251 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14252 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14253 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14254 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14255 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14256 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14257 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14258 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14259 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14260 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14261
14262 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14263 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14264 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14265 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14266 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14267 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14268 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14269 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14270 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14271 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14272 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14273 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14274 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14275 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14276 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14277 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14278
14279 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14280 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14281 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14282 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14283 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14284 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14285 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14286 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14287 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14288 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14289 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14290 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14291 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14292 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14293 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14294 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14295
14296 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14297 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14298 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14299 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14300 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14301 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14302 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14303 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14304 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14305 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14306 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14307 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14308 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14309 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311 /* 0xff */ IEMOP_X4(iemOp_ud0),
14312};
14313AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
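/*
 * Usage sketch: the map is laid out as 256 opcode rows times 4 prefix
 * columns (none, 0x66, 0xf3, 0xf2), which the 1024-entry assertion above
 * checks.  Assuming the decoder's prefix index follows that column order,
 * dispatch amounts to something like:
 *
 *     return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 */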
14314
14315/** @} */
14316