/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96955 2022-09-30 15:51:48Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

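/*
 * Usage sketch (not part of the original file): a typical opcode handler just
 * decodes its mnemonic and forwards to the worker above together with the
 * matching assembly-level helper.  The handler and helper names below are
 * placeholders in the spirit of the "pxxx" notation, not real symbols:
 *
 *      FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 *      {
 *          IEMOP_MNEMONIC2(RM, PXXX, pxxx, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxxx_u64);
 *      }
 */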

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

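/*
 * Note on the two MMX worker flavours (a sketch of the assumed typedef
 * shapes; see IEMInternal.h for the authoritative declarations): the plain
 * flavour receives the FPU/FXSAVE state in addition to the operands, while
 * the "Opt" flavour takes only the operands, hence the different call macro
 * (IEM_MC_CALL_MMX_AIMPL_2 vs IEM_MC_CALL_VOID_AIMPL_2):
 *
 *      typedef void FNIEMAIMPLMEDIAF2U64(PCX86FXSTATE pFpuState,
 *                                        uint64_t *puDst, uint64_t const *puSrc);
 *      typedef void FNIEMAIMPLMEDIAOPTF2U64(uint64_t *puDst, uint64_t const *puSrc);
 */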

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

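/*
 * Usage sketch (illustrative only): the SSE-era MMX extensions (PAVGB,
 * PSADBW and friends) would be wired up through this worker or its "Opt"
 * sibling below, so the SSE / AMD MMX-extension CPUID check is applied on
 * top of the usual MMX checks.  Placeholder names again:
 *
 *      FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 *      {
 *          IEMOP_MNEMONIC2(RM, PXXX, pxxx, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pxxx_u64);
 *      }
 */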

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2; whether the instruction is actually
 * supported by the guest CPU is passed in via @a fSupported.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

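/*
 * Usage sketch (illustrative only): the caller supplies the CPUID feature
 * flag that gates the instruction, e.g. the guest's SSE2 bit for an MMX
 * instruction added with SSE2.  Handler and helper names are placeholders:
 *
 *      FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 *      {
 *          IEMOP_MNEMONIC2(RM, PXXX, pxxx, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pxxx_u64,
 *                                IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
 *      }
 */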

/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

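/*
 * For reference: the ModR/M helpers used above pick apart the byte fetched
 * at the top of the worker.  Conceptually (a sketch of the assumed bit
 * layout; the real macros additionally fold in REX.R/REX.B where relevant):
 *
 *      mod = (bRm >> 6) & 3;   // 3 = register operand, otherwise memory
 *      reg = (bRm >> 3) & 7;   // destination register (xmm1)
 *      rm  =  bRm       & 7;   // source register or addressing mode (xmm2/mem)
 */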

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

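/*
 * Note (assumed semantics): IEM_MC_FETCH_MEM_U128_ALIGN_SSE is what gives
 * these workers their "proper alignment enforced" property.  Conceptually it
 * does something along the lines of:
 *
 *      if (GCPtrEffSrc & 15)
 *          return iemRaiseGeneralProtectionFault0(pVCpu); // misaligned 128-bit SSE access
 *
 * before performing the actual 16-byte read.
 */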

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

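/*
 * Usage sketch (illustrative; the real wiring lives further down in this
 * file): the low-half unpack instructions fit this pattern, reading only a
 * dword from memory per the Intel documentation:
 *
 *      FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
 *      {
 *          IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
 *      }
 */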

/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where SSE may read either the full 128 bits
 * or only the low 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

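/*
 * Usage sketch (illustrative only, placeholder wiring): the SSE low-half
 * unpacks (e.g. UNPCKLPS) fit this shape, with the full 16-byte aligned
 * fetch discussed in the @todo above:
 *
 *      FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
 *      {
 *          IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
 *      }
 */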

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where SSE2 may read either the full 128 bits
 * or only the low 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

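/*
 * The IEMSSERESULT round-trip above works roughly as follows (assumed
 * layout; see IEMInternal.h for the authoritative definition): the worker
 * writes both the value and the updated MXCSR into the result struct,
 * IEM_MC_STORE_SSE_RESULT commits them to xmm1 and the guest MXCSR, and
 * IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT then raises #XM (or #UD if
 * CR4.OSXMMEXCPT is clear) for any unmasked SIMD FP exception:
 *
 *      typedef struct IEMSSERESULT
 *      {
 *          X86XMMREG   uResult;    // the result value
 *          uint32_t    MXCSR;      // MXCSR with freshly accumulated exception flags
 *      } IEMSSERESULT;
 */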

/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * The 32-bit memory operand has no alignment restriction.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

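/*
 * Usage sketch (illustrative only): the scalar single-precision arithmetic
 * instructions (ADDSS and friends) fit this shape; note that the memory form
 * reads just 32 bits, so no 16-byte alignment applies.  Placeholder wiring:
 *
 *      FNIEMOP_DEF(iemOp_addss_Vss_Wss)
 *      {
 *          IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
 *      }
 */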

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * The 64-bit memory operand has no alignment restriction.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE2 is a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

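/*
 * Usage sketch (illustrative only): the SSE3 horizontal adds/subs (HADDPS,
 * HSUBPS, etc.) are the natural clients of this worker.  Placeholder wiring:
 *
 *      FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
 *      {
 *          IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
 *      }
 */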

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

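/*
 * The dispatcher below indexes this table with the ModR/M reg field; with
 * the assumed IEM_GET_MODRM_REG_8 semantics this boils down to:
 *
 *      return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> 3) & 7], bRm);
 *
 * i.e. the /0../7 annotations above map straight onto table entries.
 */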
/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HM, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HM, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits (CR0.PE, CR0.MP, CR0.EM and CR0.TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


1789/**
1790 * Group 7 jump table, memory variant.
1791 */
1792IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1793{
1794 iemOp_Grp7_sgdt,
1795 iemOp_Grp7_sidt,
1796 iemOp_Grp7_lgdt,
1797 iemOp_Grp7_lidt,
1798 iemOp_Grp7_smsw,
1799 iemOp_InvalidWithRM,
1800 iemOp_Grp7_lmsw,
1801 iemOp_Grp7_invlpg
1802};
1803
1804
1805/** Opcode 0x0f 0x01. */
1806FNIEMOP_DEF(iemOp_Grp7)
1807{
1808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1809 if (IEM_IS_MODRM_MEM_MODE(bRm))
1810 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1811
1812 switch (IEM_GET_MODRM_REG_8(bRm))
1813 {
1814 case 0:
1815 switch (IEM_GET_MODRM_RM_8(bRm))
1816 {
1817 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1818 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1819 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1820 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1821 }
1822 return IEMOP_RAISE_INVALID_OPCODE();
1823
1824 case 1:
1825 switch (IEM_GET_MODRM_RM_8(bRm))
1826 {
1827 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1828 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1829 }
1830 return IEMOP_RAISE_INVALID_OPCODE();
1831
1832 case 2:
1833 switch (IEM_GET_MODRM_RM_8(bRm))
1834 {
1835 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1836 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1837 }
1838 return IEMOP_RAISE_INVALID_OPCODE();
1839
1840 case 3:
1841 switch (IEM_GET_MODRM_RM_8(bRm))
1842 {
1843 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1844 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1845 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1846 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1847 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1848 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1849 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1850 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1852 }
1853
1854 case 4:
1855 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1856
1857 case 5:
1858 return IEMOP_RAISE_INVALID_OPCODE();
1859
1860 case 6:
1861 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1862
1863 case 7:
1864 switch (IEM_GET_MODRM_RM_8(bRm))
1865 {
1866 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1867 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1868 }
1869 return IEMOP_RAISE_INVALID_OPCODE();
1870
1871 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1872 }
1873}
1874
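/*
 * Editor's note: a minimal sketch of how iemOp_Grp7 above routes a ModR/M
 * byte.  Memory forms (mod != 3) go through g_apfnGroup7Mem indexed by the
 * reg field; register forms are decoded on reg and rm.  The helper names
 * are made up for this sketch.
 */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>

static unsigned grp7ModSketch(uint8_t bRm) { return bRm >> 6; }        /* bits 7:6 */
static unsigned grp7RegSketch(uint8_t bRm) { return (bRm >> 3) & 7; }  /* bits 5:3 */
static unsigned grp7RmSketch(uint8_t bRm)  { return bRm & 7; }         /* bits 2:0 */

/* 0f 01 10: mod=0, reg=2, rm=0 -> memory form -> g_apfnGroup7Mem[2] = lgdt. */
/* 0f 01 d0: mod=3, reg=2, rm=0 -> register form -> xgetbv.                  */
/* 0f 01 f8: mod=3, reg=7, rm=0 -> register form -> swapgs (64-bit only).    */
#endif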
1875/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
1876FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1877{
1878 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1880
1881 if (IEM_IS_MODRM_REG_MODE(bRm))
1882 {
1883 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1884 switch (pVCpu->iem.s.enmEffOpSize)
1885 {
1886 case IEMMODE_16BIT:
1887 {
1888 IEM_MC_BEGIN(3, 0);
1889 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1890 IEM_MC_ARG(uint16_t, u16Sel, 1);
1891 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1892
1893 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1894 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1895 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1896
1897 IEM_MC_END();
1898 return VINF_SUCCESS;
1899 }
1900
1901 case IEMMODE_32BIT:
1902 case IEMMODE_64BIT:
1903 {
1904 IEM_MC_BEGIN(3, 0);
1905 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1906 IEM_MC_ARG(uint16_t, u16Sel, 1);
1907 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1908
1909 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1910 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1911 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1912
1913 IEM_MC_END();
1914 return VINF_SUCCESS;
1915 }
1916
1917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1918 }
1919 }
1920 else
1921 {
1922 switch (pVCpu->iem.s.enmEffOpSize)
1923 {
1924 case IEMMODE_16BIT:
1925 {
1926 IEM_MC_BEGIN(3, 1);
1927 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1928 IEM_MC_ARG(uint16_t, u16Sel, 1);
1929 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1931
1932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1933 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1934
1935 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1936 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1937 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1938
1939 IEM_MC_END();
1940 return VINF_SUCCESS;
1941 }
1942
1943 case IEMMODE_32BIT:
1944 case IEMMODE_64BIT:
1945 {
1946 IEM_MC_BEGIN(3, 1);
1947 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1948 IEM_MC_ARG(uint16_t, u16Sel, 1);
1949 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1951
1952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1953 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1954/** @todo testcase: make sure it's a 16-bit read. */
1955
1956 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1957 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1958 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1959
1960 IEM_MC_END();
1961 return VINF_SUCCESS;
1962 }
1963
1964 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1965 }
1966 }
1967}
1968
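/*
 * Editor's note: how guest code typically consumes the semantics the worker
 * above defers to iemCImpl_LarLsl_uXX: on success ZF is set and the
 * destination receives the access rights (LAR) or segment limit (LSL); on
 * failure ZF is clear and the destination is left unchanged.  GCC-style
 * inline assembly, offered purely as a sketch.
 */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
# include <stdbool.h>

static bool tryLarSketch(uint16_t uSel, uint32_t *puAccessRights)
{
    bool fValid;
    __asm__ ("lar %2, %0" : "=r" (*puAccessRights), "=@ccz" (fValid) : "r" ((uint32_t)uSel));
    return fValid; /* *puAccessRights is only meaningful when true. */
}
#endif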
1969
1970
1971/** Opcode 0x0f 0x02. */
1972FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1973{
1974 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1975 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1976}
1977
1978
1979/** Opcode 0x0f 0x03. */
1980FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1981{
1982 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1983 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1984}
1985
1986
1987/** Opcode 0x0f 0x05. */
1988FNIEMOP_DEF(iemOp_syscall)
1989{
1990 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1992 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1993}
1994
1995
1996/** Opcode 0x0f 0x06. */
1997FNIEMOP_DEF(iemOp_clts)
1998{
1999 IEMOP_MNEMONIC(clts, "clts");
2000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2001 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
2002}
2003
2004
2005/** Opcode 0x0f 0x07. */
2006FNIEMOP_DEF(iemOp_sysret)
2007{
2008 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
2011}
2012
2013
2014/** Opcode 0x0f 0x08. */
2015FNIEMOP_DEF(iemOp_invd)
2016{
2017 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
2018 IEMOP_HLP_MIN_486();
2019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2020 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
2021}
2022
2023
2024/** Opcode 0x0f 0x09. */
2025FNIEMOP_DEF(iemOp_wbinvd)
2026{
2027 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2028 IEMOP_HLP_MIN_486();
2029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2030 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
2031}
2032
2033
2034/** Opcode 0x0f 0x0b. */
2035FNIEMOP_DEF(iemOp_ud2)
2036{
2037 IEMOP_MNEMONIC(ud2, "ud2");
2038 return IEMOP_RAISE_INVALID_OPCODE();
2039}
2040
2041/** Opcode 0x0f 0x0d. */
2042FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2043{
2044 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2045 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2046 {
2047 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2048 return IEMOP_RAISE_INVALID_OPCODE();
2049 }
2050
2051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2052 if (IEM_IS_MODRM_REG_MODE(bRm))
2053 {
2054 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2055 return IEMOP_RAISE_INVALID_OPCODE();
2056 }
2057
2058 switch (IEM_GET_MODRM_REG_8(bRm))
2059 {
2060 case 2: /* Aliased to /0 for the time being. */
2061 case 4: /* Aliased to /0 for the time being. */
2062 case 5: /* Aliased to /0 for the time being. */
2063 case 6: /* Aliased to /0 for the time being. */
2064 case 7: /* Aliased to /0 for the time being. */
2065 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2066 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2067 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2069 }
2070
2071 IEM_MC_BEGIN(0, 1);
2072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2075 /* Currently a NOP. */
2076 NOREF(GCPtrEffSrc);
2077 IEM_MC_ADVANCE_RIP();
2078 IEM_MC_END();
2079 return VINF_SUCCESS;
2080}
2081
2082
2083/** Opcode 0x0f 0x0e. */
2084FNIEMOP_DEF(iemOp_femms)
2085{
2086 IEMOP_MNEMONIC(femms, "femms");
2087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2088
2089 IEM_MC_BEGIN(0,0);
2090 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2091 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2092 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2093 IEM_MC_FPU_FROM_MMX_MODE();
2094 IEM_MC_ADVANCE_RIP();
2095 IEM_MC_END();
2096 return VINF_SUCCESS;
2097}
2098
2099
2100/** Opcode 0x0f 0x0f. */
2101FNIEMOP_DEF(iemOp_3Dnow)
2102{
2103 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2104 {
2105 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2106 return IEMOP_RAISE_INVALID_OPCODE();
2107 }
2108
2109#ifdef IEM_WITH_3DNOW
2110 /* This is pretty sparse, use switch instead of table. */
2111 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2112 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2113#else
2114 IEMOP_BITCH_ABOUT_STUB();
2115 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2116#endif
2117}
2118
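/*
 * Editor's note: 3DNow! is the one case where the operation selector comes
 * after the ModR/M byte (0f 0f /r ib), which is why the dispatcher above
 * reads one extra opcode byte before calling iemOp_3DNowDispatcher.
 */
#if 0 /* illustrative only, not part of the build */
/* 0f 0f c1 9e = pfadd mm0, mm1 (suffix 0x9e selects PFADD; c1 = mod 3, mm0, mm1) */
/* 0f 0f c1 b4 = pfmul mm0, mm1 (suffix 0xb4 selects PFMUL)                       */
static unsigned char const g_abPfaddExample[] = { 0x0f, 0x0f, 0xc1, 0x9e };
#endif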
2119
2120/**
2121 * @opcode 0x10
2122 * @oppfx none
2123 * @opcpuid sse
2124 * @opgroup og_sse_simdfp_datamove
2125 * @opxcpttype 4UA
2126 * @optest op1=1 op2=2 -> op1=2
2127 * @optest op1=0 op2=-22 -> op1=-22
2128 */
2129FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2130{
2131 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2132 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2133 if (IEM_IS_MODRM_REG_MODE(bRm))
2134 {
2135 /*
2136 * Register, register.
2137 */
2138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2139 IEM_MC_BEGIN(0, 0);
2140 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2141 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2142 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2143 IEM_GET_MODRM_RM(pVCpu, bRm));
2144 IEM_MC_ADVANCE_RIP();
2145 IEM_MC_END();
2146 }
2147 else
2148 {
2149 /*
2150 * Memory, register.
2151 */
2152 IEM_MC_BEGIN(0, 2);
2153 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2155
2156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2158 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2160
2161 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2162 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2163
2164 IEM_MC_ADVANCE_RIP();
2165 IEM_MC_END();
2166 }
2167 return VINF_SUCCESS;
2168
2169}
2170
2171
2172/**
2173 * @opcode 0x10
2174 * @oppfx 0x66
2175 * @opcpuid sse2
2176 * @opgroup og_sse2_pcksclr_datamove
2177 * @opxcpttype 4UA
2178 * @optest op1=1 op2=2 -> op1=2
2179 * @optest op1=0 op2=-42 -> op1=-42
2180 */
2181FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2182{
2183 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2185 if (IEM_IS_MODRM_REG_MODE(bRm))
2186 {
2187 /*
2188 * Register, register.
2189 */
2190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2191 IEM_MC_BEGIN(0, 0);
2192 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2193 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2194 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2195 IEM_GET_MODRM_RM(pVCpu, bRm));
2196 IEM_MC_ADVANCE_RIP();
2197 IEM_MC_END();
2198 }
2199 else
2200 {
2201 /*
2202 * Memory, register.
2203 */
2204 IEM_MC_BEGIN(0, 2);
2205 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2207
2208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2210 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2211 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2212
2213 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2214 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2215
2216 IEM_MC_ADVANCE_RIP();
2217 IEM_MC_END();
2218 }
2219 return VINF_SUCCESS;
2220}
2221
2222
2223/**
2224 * @opcode 0x10
2225 * @oppfx 0xf3
2226 * @opcpuid sse
2227 * @opgroup og_sse_simdfp_datamove
2228 * @opxcpttype 5
2229 * @optest op1=1 op2=2 -> op1=2
2230 * @optest op1=0 op2=-22 -> op1=-22
2231 */
2232FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2233{
2234 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2236 if (IEM_IS_MODRM_REG_MODE(bRm))
2237 {
2238 /*
2239 * Register, register.
2240 */
2241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2242 IEM_MC_BEGIN(0, 1);
2243 IEM_MC_LOCAL(uint32_t, uSrc);
2244
2245 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2246 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2247 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2248 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2249
2250 IEM_MC_ADVANCE_RIP();
2251 IEM_MC_END();
2252 }
2253 else
2254 {
2255 /*
2256 * Memory, register.
2257 */
2258 IEM_MC_BEGIN(0, 2);
2259 IEM_MC_LOCAL(uint32_t, uSrc);
2260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2261
2262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2264 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2265 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2266
2267 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2268 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2269
2270 IEM_MC_ADVANCE_RIP();
2271 IEM_MC_END();
2272 }
2273 return VINF_SUCCESS;
2274}
2275
2276
2277/**
2278 * @opcode 0x10
2279 * @oppfx 0xf2
2280 * @opcpuid sse2
2281 * @opgroup og_sse2_pcksclr_datamove
2282 * @opxcpttype 5
2283 * @optest op1=1 op2=2 -> op1=2
2284 * @optest op1=0 op2=-42 -> op1=-42
2285 */
2286FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2287{
2288 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2290 if (IEM_IS_MODRM_REG_MODE(bRm))
2291 {
2292 /*
2293 * Register, register.
2294 */
2295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2296 IEM_MC_BEGIN(0, 1);
2297 IEM_MC_LOCAL(uint64_t, uSrc);
2298
2299 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2300 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2301 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2302 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2303
2304 IEM_MC_ADVANCE_RIP();
2305 IEM_MC_END();
2306 }
2307 else
2308 {
2309 /*
2310 * Memory, register.
2311 */
2312 IEM_MC_BEGIN(0, 2);
2313 IEM_MC_LOCAL(uint64_t, uSrc);
2314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2315
2316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2318 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2319 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2320
2321 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2322 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2323
2324 IEM_MC_ADVANCE_RIP();
2325 IEM_MC_END();
2326 }
2327 return VINF_SUCCESS;
2328}
2329
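/*
 * Editor's note: the four load forms above differ in a way that is easy to
 * miss: register-to-register movss/movsd copy only the low element and leave
 * the rest of the destination alone, while loads from memory zero bits
 * 127:32 resp. 127:64 (the _ZX_U128 stores).  A sketch of the movss pair:
 */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
typedef struct XMMSKETCH { uint32_t au32[4]; } XMMSKETCH;

static void movssRegRegSketch(XMMSKETCH *pDst, XMMSKETCH const *pSrc)
{
    pDst->au32[0] = pSrc->au32[0]; /* au32[1..3] keep their old values */
}

static void movssRegMemSketch(XMMSKETCH *pDst, uint32_t const *puMem)
{
    pDst->au32[0] = *puMem;
    pDst->au32[1] = pDst->au32[2] = pDst->au32[3] = 0; /* zero extended */
}
#endif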
2330
2331/**
2332 * @opcode 0x11
2333 * @oppfx none
2334 * @opcpuid sse
2335 * @opgroup og_sse_simdfp_datamove
2336 * @opxcpttype 4UA
2337 * @optest op1=1 op2=2 -> op1=2
2338 * @optest op1=0 op2=-42 -> op1=-42
2339 */
2340FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2341{
2342 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2344 if (IEM_IS_MODRM_REG_MODE(bRm))
2345 {
2346 /*
2347 * Register, register.
2348 */
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_BEGIN(0, 0);
2351 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2352 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2353 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2354 IEM_GET_MODRM_REG(pVCpu, bRm));
2355 IEM_MC_ADVANCE_RIP();
2356 IEM_MC_END();
2357 }
2358 else
2359 {
2360 /*
2361 * Memory, register.
2362 */
2363 IEM_MC_BEGIN(0, 2);
2364 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2366
2367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2369 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2370 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2371
2372 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2373 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2374
2375 IEM_MC_ADVANCE_RIP();
2376 IEM_MC_END();
2377 }
2378 return VINF_SUCCESS;
2379}
2380
2381
2382/**
2383 * @opcode 0x11
2384 * @oppfx 0x66
2385 * @opcpuid sse2
2386 * @opgroup og_sse2_pcksclr_datamove
2387 * @opxcpttype 4UA
2388 * @optest op1=1 op2=2 -> op1=2
2389 * @optest op1=0 op2=-42 -> op1=-42
2390 */
2391FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2392{
2393 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2395 if (IEM_IS_MODRM_REG_MODE(bRm))
2396 {
2397 /*
2398 * Register, register.
2399 */
2400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2401 IEM_MC_BEGIN(0, 0);
2402 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2403 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2404 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2405 IEM_GET_MODRM_REG(pVCpu, bRm));
2406 IEM_MC_ADVANCE_RIP();
2407 IEM_MC_END();
2408 }
2409 else
2410 {
2411 /*
2412 * Memory, register.
2413 */
2414 IEM_MC_BEGIN(0, 2);
2415 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2417
2418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2420 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2421 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2422
2423 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2424 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2425
2426 IEM_MC_ADVANCE_RIP();
2427 IEM_MC_END();
2428 }
2429 return VINF_SUCCESS;
2430}
2431
2432
2433/**
2434 * @opcode 0x11
2435 * @oppfx 0xf3
2436 * @opcpuid sse
2437 * @opgroup og_sse_simdfp_datamove
2438 * @opxcpttype 5
2439 * @optest op1=1 op2=2 -> op1=2
2440 * @optest op1=0 op2=-22 -> op1=-22
2441 */
2442FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2443{
2444 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2445 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2446 if (IEM_IS_MODRM_REG_MODE(bRm))
2447 {
2448 /*
2449 * Register, register.
2450 */
2451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2452 IEM_MC_BEGIN(0, 1);
2453 IEM_MC_LOCAL(uint32_t, uSrc);
2454
2455 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2456 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2457 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2458 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2459
2460 IEM_MC_ADVANCE_RIP();
2461 IEM_MC_END();
2462 }
2463 else
2464 {
2465 /*
2466 * Memory, register.
2467 */
2468 IEM_MC_BEGIN(0, 2);
2469 IEM_MC_LOCAL(uint32_t, uSrc);
2470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2471
2472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2474 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2475 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2476
2477 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2478 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2479
2480 IEM_MC_ADVANCE_RIP();
2481 IEM_MC_END();
2482 }
2483 return VINF_SUCCESS;
2484}
2485
2486
2487/**
2488 * @opcode 0x11
2489 * @oppfx 0xf2
2490 * @opcpuid sse2
2491 * @opgroup og_sse2_pcksclr_datamove
2492 * @opxcpttype 5
2493 * @optest op1=1 op2=2 -> op1=2
2494 * @optest op1=0 op2=-42 -> op1=-42
2495 */
2496FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2497{
2498 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 if (IEM_IS_MODRM_REG_MODE(bRm))
2501 {
2502 /*
2503 * Register, register.
2504 */
2505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2506 IEM_MC_BEGIN(0, 1);
2507 IEM_MC_LOCAL(uint64_t, uSrc);
2508
2509 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2510 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2511 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2512 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2513
2514 IEM_MC_ADVANCE_RIP();
2515 IEM_MC_END();
2516 }
2517 else
2518 {
2519 /*
2520 * Memory, register.
2521 */
2522 IEM_MC_BEGIN(0, 2);
2523 IEM_MC_LOCAL(uint64_t, uSrc);
2524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2525
2526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2528 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2529 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2530
2531 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2532 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2533
2534 IEM_MC_ADVANCE_RIP();
2535 IEM_MC_END();
2536 }
2537 return VINF_SUCCESS;
2538}
2539
2540
2541FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2542{
2543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2544 if (IEM_IS_MODRM_REG_MODE(bRm))
2545 {
2546 /**
2547 * @opcode 0x12
2548 * @opcodesub 11 mr/reg
2549 * @oppfx none
2550 * @opcpuid sse
2551 * @opgroup og_sse_simdfp_datamove
2552 * @opxcpttype 5
2553 * @optest op1=1 op2=2 -> op1=2
2554 * @optest op1=0 op2=-42 -> op1=-42
2555 */
2556 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2557
2558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2559 IEM_MC_BEGIN(0, 1);
2560 IEM_MC_LOCAL(uint64_t, uSrc);
2561
2562 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2563 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2564 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2565 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2566
2567 IEM_MC_ADVANCE_RIP();
2568 IEM_MC_END();
2569 }
2570 else
2571 {
2572 /**
2573 * @opdone
2574 * @opcode 0x12
2575 * @opcodesub !11 mr/reg
2576 * @oppfx none
2577 * @opcpuid sse
2578 * @opgroup og_sse_simdfp_datamove
2579 * @opxcpttype 5
2580 * @optest op1=1 op2=2 -> op1=2
2581 * @optest op1=0 op2=-42 -> op1=-42
2582 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2583 */
2584 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2585
2586 IEM_MC_BEGIN(0, 2);
2587 IEM_MC_LOCAL(uint64_t, uSrc);
2588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2589
2590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2592 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2593 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2594
2595 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2596 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2597
2598 IEM_MC_ADVANCE_RIP();
2599 IEM_MC_END();
2600 }
2601 return VINF_SUCCESS;
2602}
2603
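/*
 * Editor's note: 0f 12 is two instructions sharing one decoder entry, and
 * the mod field is what separates them, as the if/else above shows.  The
 * same pattern recurs at 0f 16 for movlhps/movhps.
 */
#if 0 /* illustrative only, not part of the build */
/* 0f 12 c1: mod=3 -> movhlps xmm0, xmm1  (xmm0[63:0] = xmm1[127:64]) */
/* 0f 12 03: mod=0 -> movlps  xmm0, [ebx] (xmm0[63:0] = 64-bit load)  */
#endif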
2604
2605/**
2606 * @opcode 0x12
2607 * @opcodesub !11 mr/reg
2608 * @oppfx 0x66
2609 * @opcpuid sse2
2610 * @opgroup og_sse2_pcksclr_datamove
2611 * @opxcpttype 5
2612 * @optest op1=1 op2=2 -> op1=2
2613 * @optest op1=0 op2=-42 -> op1=-42
2614 */
2615FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2616{
2617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2618 if (IEM_IS_MODRM_MEM_MODE(bRm))
2619 {
2620 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2621
2622 IEM_MC_BEGIN(0, 2);
2623 IEM_MC_LOCAL(uint64_t, uSrc);
2624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2625
2626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2628 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2629 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2630
2631 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2632 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2633
2634 IEM_MC_ADVANCE_RIP();
2635 IEM_MC_END();
2636 return VINF_SUCCESS;
2637 }
2638
2639 /**
2640 * @opdone
2641 * @opmnemonic ud660f12m3
2642 * @opcode 0x12
2643 * @opcodesub 11 mr/reg
2644 * @oppfx 0x66
2645 * @opunused immediate
2646 * @opcpuid sse
2647 * @optest ->
2648 */
2649 return IEMOP_RAISE_INVALID_OPCODE();
2650}
2651
2652
2653/**
2654 * @opcode 0x12
2655 * @oppfx 0xf3
2656 * @opcpuid sse3
2657 * @opgroup og_sse3_pcksclr_datamove
2658 * @opxcpttype 4
2659 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2660 * op1=0x00000002000000020000000100000001
2661 */
2662FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2663{
2664 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2666 if (IEM_IS_MODRM_REG_MODE(bRm))
2667 {
2668 /*
2669 * Register, register.
2670 */
2671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2672 IEM_MC_BEGIN(2, 0);
2673 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2674 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2675
2676 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2677 IEM_MC_PREPARE_SSE_USAGE();
2678
2679 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2680 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2681 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2682
2683 IEM_MC_ADVANCE_RIP();
2684 IEM_MC_END();
2685 }
2686 else
2687 {
2688 /*
2689 * Register, memory.
2690 */
2691 IEM_MC_BEGIN(2, 2);
2692 IEM_MC_LOCAL(RTUINT128U, uSrc);
2693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2694 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2695 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2696
2697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2699 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2700 IEM_MC_PREPARE_SSE_USAGE();
2701
2702 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2703 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2704 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2705
2706 IEM_MC_ADVANCE_RIP();
2707 IEM_MC_END();
2708 }
2709 return VINF_SUCCESS;
2710}
2711
2712
2713/**
2714 * @opcode 0x12
2715 * @oppfx 0xf2
2716 * @opcpuid sse3
2717 * @opgroup og_sse3_pcksclr_datamove
2718 * @opxcpttype 5
2719 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2720 * op1=0x22222222111111112222222211111111
2721 */
2722FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2723{
2724 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2726 if (IEM_IS_MODRM_REG_MODE(bRm))
2727 {
2728 /*
2729 * Register, register.
2730 */
2731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2732 IEM_MC_BEGIN(2, 0);
2733 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2734 IEM_MC_ARG(uint64_t, uSrc, 1);
2735
2736 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2737 IEM_MC_PREPARE_SSE_USAGE();
2738
2739 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2740 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2741 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2742
2743 IEM_MC_ADVANCE_RIP();
2744 IEM_MC_END();
2745 }
2746 else
2747 {
2748 /*
2749 * Register, memory.
2750 */
2751 IEM_MC_BEGIN(2, 2);
2752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2753 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2754 IEM_MC_ARG(uint64_t, uSrc, 1);
2755
2756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2758 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2759 IEM_MC_PREPARE_SSE_USAGE();
2760
2761 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2762 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2763 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2764
2765 IEM_MC_ADVANCE_RIP();
2766 IEM_MC_END();
2767 }
2768 return VINF_SUCCESS;
2769}
2770
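/*
 * Editor's note: element selection for the SSE3 duplicating moves, written
 * with source dwords s0..s3 and qwords q0..q1 (movshdup lives further down,
 * at f3 0f 16):
 *   movsldup: dst = { s0, s0, s2, s2 }
 *   movshdup: dst = { s1, s1, s3, s3 }
 *   movddup:  dst = { q0, q0 }
 */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>

static void movsldupSketch(uint32_t aDst[4], uint32_t const aSrc[4])
{
    aDst[0] = aDst[1] = aSrc[0];
    aDst[2] = aDst[3] = aSrc[2];
}

static void movddupSketch(uint64_t aDst[2], uint64_t const aSrc[2])
{
    aDst[0] = aDst[1] = aSrc[0];
}
#endif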
2771
2772/**
2773 * @opcode 0x13
2774 * @opcodesub !11 mr/reg
2775 * @oppfx none
2776 * @opcpuid sse
2777 * @opgroup og_sse_simdfp_datamove
2778 * @opxcpttype 5
2779 * @optest op1=1 op2=2 -> op1=2
2780 * @optest op1=0 op2=-42 -> op1=-42
2781 */
2782FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2783{
2784 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2785 if (IEM_IS_MODRM_MEM_MODE(bRm))
2786 {
2787 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2788
2789 IEM_MC_BEGIN(0, 2);
2790 IEM_MC_LOCAL(uint64_t, uSrc);
2791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2792
2793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2795 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2796 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2797
2798 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2799 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2800
2801 IEM_MC_ADVANCE_RIP();
2802 IEM_MC_END();
2803 return VINF_SUCCESS;
2804 }
2805
2806 /**
2807 * @opdone
2808 * @opmnemonic ud0f13m3
2809 * @opcode 0x13
2810 * @opcodesub 11 mr/reg
2811 * @oppfx none
2812 * @opunused immediate
2813 * @opcpuid sse
2814 * @optest ->
2815 */
2816 return IEMOP_RAISE_INVALID_OPCODE();
2817}
2818
2819
2820/**
2821 * @opcode 0x13
2822 * @opcodesub !11 mr/reg
2823 * @oppfx 0x66
2824 * @opcpuid sse2
2825 * @opgroup og_sse2_pcksclr_datamove
2826 * @opxcpttype 5
2827 * @optest op1=1 op2=2 -> op1=2
2828 * @optest op1=0 op2=-42 -> op1=-42
2829 */
2830FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2831{
2832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2833 if (IEM_IS_MODRM_MEM_MODE(bRm))
2834 {
2835 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2836 IEM_MC_BEGIN(0, 2);
2837 IEM_MC_LOCAL(uint64_t, uSrc);
2838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2839
2840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2842 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2843 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2844
2845 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2846 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2847
2848 IEM_MC_ADVANCE_RIP();
2849 IEM_MC_END();
2850 return VINF_SUCCESS;
2851 }
2852
2853 /**
2854 * @opdone
2855 * @opmnemonic ud660f13m3
2856 * @opcode 0x13
2857 * @opcodesub 11 mr/reg
2858 * @oppfx 0x66
2859 * @opunused immediate
2860 * @opcpuid sse
2861 * @optest ->
2862 */
2863 return IEMOP_RAISE_INVALID_OPCODE();
2864}
2865
2866
2867/**
2868 * @opmnemonic udf30f13
2869 * @opcode 0x13
2870 * @oppfx 0xf3
2871 * @opunused intel-modrm
2872 * @opcpuid sse
2873 * @optest ->
2874 * @opdone
2875 */
2876
2877/**
2878 * @opmnemonic udf20f13
2879 * @opcode 0x13
2880 * @oppfx 0xf2
2881 * @opunused intel-modrm
2882 * @opcpuid sse
2883 * @optest ->
2884 * @opdone
2885 */
2886
2887/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2888FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2889{
2890 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2891 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2892}
2893
2894
2895/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2896FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2897{
2898 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2899 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2900}
2901
2902
2903/**
2904 * @opdone
2905 * @opmnemonic udf30f14
2906 * @opcode 0x14
2907 * @oppfx 0xf3
2908 * @opunused intel-modrm
2909 * @opcpuid sse
2910 * @optest ->
2911 * @opdone
2912 */
2913
2914/**
2915 * @opmnemonic udf20f14
2916 * @opcode 0x14
2917 * @oppfx 0xf2
2918 * @opunused intel-modrm
2919 * @opcpuid sse
2920 * @optest ->
2921 * @opdone
2922 */
2923
2924/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2925FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2926{
2927 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2928 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2929}
2930
2931
2932/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2933FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2934{
2935 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2936 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2937}
2938
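/*
 * Editor's note: the interleave patterns implemented by the common workers
 * used above, with a0.. from the destination and b0.. from the source:
 *   unpcklps: dst = { a0, b0, a1, b1 }     unpckhps: dst = { a2, b2, a3, b3 }
 *   unpcklpd: dst = { a0, b0 }             unpckhpd: dst = { a1, b1 }
 */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>

static void unpcklpsSketch(uint32_t aDst[4], uint32_t const aSrc[4])
{
    uint32_t const a0 = aDst[0], a1 = aDst[1];
    aDst[0] = a0;
    aDst[1] = aSrc[0];
    aDst[2] = a1;
    aDst[3] = aSrc[1];
}
#endif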
2939
2940/* Opcode 0xf3 0x0f 0x15 - invalid */
2941/* Opcode 0xf2 0x0f 0x15 - invalid */
2942
2943/**
2944 * @opdone
2945 * @opmnemonic udf30f15
2946 * @opcode 0x15
2947 * @oppfx 0xf3
2948 * @opunused intel-modrm
2949 * @opcpuid sse
2950 * @optest ->
2951 * @opdone
2952 */
2953
2954/**
2955 * @opmnemonic udf20f15
2956 * @opcode 0x15
2957 * @oppfx 0xf2
2958 * @opunused intel-modrm
2959 * @opcpuid sse
2960 * @optest ->
2961 * @opdone
2962 */
2963
2964FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2965{
2966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2967 if (IEM_IS_MODRM_REG_MODE(bRm))
2968 {
2969 /**
2970 * @opcode 0x16
2971 * @opcodesub 11 mr/reg
2972 * @oppfx none
2973 * @opcpuid sse
2974 * @opgroup og_sse_simdfp_datamove
2975 * @opxcpttype 5
2976 * @optest op1=1 op2=2 -> op1=2
2977 * @optest op1=0 op2=-42 -> op1=-42
2978 */
2979 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2980
2981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2982 IEM_MC_BEGIN(0, 1);
2983 IEM_MC_LOCAL(uint64_t, uSrc);
2984
2985 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2986 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2987 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2988 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2989
2990 IEM_MC_ADVANCE_RIP();
2991 IEM_MC_END();
2992 }
2993 else
2994 {
2995 /**
2996 * @opdone
2997 * @opcode 0x16
2998 * @opcodesub !11 mr/reg
2999 * @oppfx none
3000 * @opcpuid sse
3001 * @opgroup og_sse_simdfp_datamove
3002 * @opxcpttype 5
3003 * @optest op1=1 op2=2 -> op1=2
3004 * @optest op1=0 op2=-42 -> op1=-42
3005 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
3006 */
3007 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3008
3009 IEM_MC_BEGIN(0, 2);
3010 IEM_MC_LOCAL(uint64_t, uSrc);
3011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3012
3013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3015 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3016 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3017
3018 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3019 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3020
3021 IEM_MC_ADVANCE_RIP();
3022 IEM_MC_END();
3023 }
3024 return VINF_SUCCESS;
3025}
3026
3027
3028/**
3029 * @opcode 0x16
3030 * @opcodesub !11 mr/reg
3031 * @oppfx 0x66
3032 * @opcpuid sse2
3033 * @opgroup og_sse2_pcksclr_datamove
3034 * @opxcpttype 5
3035 * @optest op1=1 op2=2 -> op1=2
3036 * @optest op1=0 op2=-42 -> op1=-42
3037 */
3038FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3039{
3040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3041 if (IEM_IS_MODRM_MEM_MODE(bRm))
3042 {
3043 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3044 IEM_MC_BEGIN(0, 2);
3045 IEM_MC_LOCAL(uint64_t, uSrc);
3046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3047
3048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3050 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3051 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3052
3053 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3054 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3055
3056 IEM_MC_ADVANCE_RIP();
3057 IEM_MC_END();
3058 return VINF_SUCCESS;
3059 }
3060
3061 /**
3062 * @opdone
3063 * @opmnemonic ud660f16m3
3064 * @opcode 0x16
3065 * @opcodesub 11 mr/reg
3066 * @oppfx 0x66
3067 * @opunused immediate
3068 * @opcpuid sse
3069 * @optest ->
3070 */
3071 return IEMOP_RAISE_INVALID_OPCODE();
3072}
3073
3074
3075/**
3076 * @opcode 0x16
3077 * @oppfx 0xf3
3078 * @opcpuid sse3
3079 * @opgroup og_sse3_pcksclr_datamove
3080 * @opxcpttype 4
3081 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3082 * op1=0x00000002000000020000000100000001
3083 */
3084FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3085{
3086 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3088 if (IEM_IS_MODRM_REG_MODE(bRm))
3089 {
3090 /*
3091 * Register, register.
3092 */
3093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3094 IEM_MC_BEGIN(2, 0);
3095 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3096 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3097
3098 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3099 IEM_MC_PREPARE_SSE_USAGE();
3100
3101 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3102 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3103 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3104
3105 IEM_MC_ADVANCE_RIP();
3106 IEM_MC_END();
3107 }
3108 else
3109 {
3110 /*
3111 * Register, memory.
3112 */
3113 IEM_MC_BEGIN(2, 2);
3114 IEM_MC_LOCAL(RTUINT128U, uSrc);
3115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3116 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3117 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3118
3119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3121 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3122 IEM_MC_PREPARE_SSE_USAGE();
3123
3124 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3125 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3126 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3127
3128 IEM_MC_ADVANCE_RIP();
3129 IEM_MC_END();
3130 }
3131 return VINF_SUCCESS;
3132}
3133
3134/**
3135 * @opdone
3136 * @opmnemonic udf20f16
3137 * @opcode 0x16
3138 * @oppfx 0xf2
3139 * @opunused intel-modrm
3140 * @opcpuid sse
3141 * @optest ->
3142 * @opdone
3143 */
3144
3145
3146/**
3147 * @opcode 0x17
3148 * @opcodesub !11 mr/reg
3149 * @oppfx none
3150 * @opcpuid sse
3151 * @opgroup og_sse_simdfp_datamove
3152 * @opxcpttype 5
3153 * @optest op1=1 op2=2 -> op1=2
3154 * @optest op1=0 op2=-42 -> op1=-42
3155 */
3156FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3157{
3158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3159 if (IEM_IS_MODRM_MEM_MODE(bRm))
3160 {
3161 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3162
3163 IEM_MC_BEGIN(0, 2);
3164 IEM_MC_LOCAL(uint64_t, uSrc);
3165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3166
3167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3169 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3170 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3171
3172 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3173 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3174
3175 IEM_MC_ADVANCE_RIP();
3176 IEM_MC_END();
3177 return VINF_SUCCESS;
3178 }
3179
3180 /**
3181 * @opdone
3182 * @opmnemonic ud0f17m3
3183 * @opcode 0x17
3184 * @opcodesub 11 mr/reg
3185 * @oppfx none
3186 * @opunused immediate
3187 * @opcpuid sse
3188 * @optest ->
3189 */
3190 return IEMOP_RAISE_INVALID_OPCODE();
3191}
3192
3193
3194/**
3195 * @opcode 0x17
3196 * @opcodesub !11 mr/reg
3197 * @oppfx 0x66
3198 * @opcpuid sse2
3199 * @opgroup og_sse2_pcksclr_datamove
3200 * @opxcpttype 5
3201 * @optest op1=1 op2=2 -> op1=2
3202 * @optest op1=0 op2=-42 -> op1=-42
3203 */
3204FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3205{
3206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3207 if (IEM_IS_MODRM_MEM_MODE(bRm))
3208 {
3209 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3210
3211 IEM_MC_BEGIN(0, 2);
3212 IEM_MC_LOCAL(uint64_t, uSrc);
3213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3214
3215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3217 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3218 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3219
3220 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3221 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3222
3223 IEM_MC_ADVANCE_RIP();
3224 IEM_MC_END();
3225 return VINF_SUCCESS;
3226 }
3227
3228 /**
3229 * @opdone
3230 * @opmnemonic ud660f17m3
3231 * @opcode 0x17
3232 * @opcodesub 11 mr/reg
3233 * @oppfx 0x66
3234 * @opunused immediate
3235 * @opcpuid sse
3236 * @optest ->
3237 */
3238 return IEMOP_RAISE_INVALID_OPCODE();
3239}
3240
3241
3242/**
3243 * @opdone
3244 * @opmnemonic udf30f17
3245 * @opcode 0x17
3246 * @oppfx 0xf3
3247 * @opunused intel-modrm
3248 * @opcpuid sse
3249 * @optest ->
3250 * @opdone
3251 */
3252
3253/**
3254 * @opmnemonic udf20f17
3255 * @opcode 0x17
3256 * @oppfx 0xf2
3257 * @opunused intel-modrm
3258 * @opcpuid sse
3259 * @optest ->
3260 * @opdone
3261 */
3262
3263
3264/** Opcode 0x0f 0x18. */
3265FNIEMOP_DEF(iemOp_prefetch_Grp16)
3266{
3267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3268 if (IEM_IS_MODRM_MEM_MODE(bRm))
3269 {
3270 switch (IEM_GET_MODRM_REG_8(bRm))
3271 {
3272 case 4: /* Aliased to /0 for the time being according to AMD. */
3273 case 5: /* Aliased to /0 for the time being according to AMD. */
3274 case 6: /* Aliased to /0 for the time being according to AMD. */
3275 case 7: /* Aliased to /0 for the time being according to AMD. */
3276 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3277 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3278 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3279 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3280 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3281 }
3282
3283 IEM_MC_BEGIN(0, 1);
3284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3287 /* Currently a NOP. */
3288 NOREF(GCPtrEffSrc);
3289 IEM_MC_ADVANCE_RIP();
3290 IEM_MC_END();
3291 return VINF_SUCCESS;
3292 }
3293
3294 return IEMOP_RAISE_INVALID_OPCODE();
3295}
3296
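/*
 * Editor's note: the prefetch hint is carried in the reg field, so all four
 * variants share opcode 0f 18 and differ only in /r; register operands are
 * invalid, as enforced above.
 */
#if 0 /* illustrative only, not part of the build */
/* 0f 18 03 = prefetchnta [ebx] (/0)     0f 18 0b = prefetcht0 [ebx] (/1) */
/* 0f 18 13 = prefetcht1  [ebx] (/2)     0f 18 1b = prefetcht2 [ebx] (/3) */
static unsigned char const g_abPrefetchNtaExample[] = { 0x0f, 0x18, 0x03 };
#endif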
3297
3298/** Opcode 0x0f 0x19..0x1f. */
3299FNIEMOP_DEF(iemOp_nop_Ev)
3300{
3301 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3303 if (IEM_IS_MODRM_REG_MODE(bRm))
3304 {
3305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3306 IEM_MC_BEGIN(0, 0);
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 else
3311 {
3312 IEM_MC_BEGIN(0, 1);
3313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3316 /* Currently a NOP. */
3317 NOREF(GCPtrEffSrc);
3318 IEM_MC_ADVANCE_RIP();
3319 IEM_MC_END();
3320 }
3321 return VINF_SUCCESS;
3322}
3323
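/*
 * Editor's note: reserving the whole 0f 19..0f 1f row for nop Ev is what
 * makes the recommended multi-byte NOP sequences decodable; the widely used
 * 0f 1f /0 forms are shown below.
 */
#if 0 /* illustrative only, not part of the build */
static unsigned char const g_abNop3[] = { 0x0f, 0x1f, 0x00 };                   /* nop dword [eax]      */
static unsigned char const g_abNop6[] = { 0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00 }; /* nop word [eax+eax+0] */
static unsigned char const g_abNop8[] = { 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00 }; /* nop dword [eax+eax+0] */
#endif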
3324
3325/** Opcode 0x0f 0x20. */
3326FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3327{
3328 /* mod is ignored, as are operand-size overrides. */
3329 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3330 IEMOP_HLP_MIN_386();
3331 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3332 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3333 else
3334 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3335
3336 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3337 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3338 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3339 {
3340 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3341 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3342 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3343 iCrReg |= 8;
3344 }
3345 switch (iCrReg)
3346 {
3347 case 0: case 2: case 3: case 4: case 8:
3348 break;
3349 default:
3350 return IEMOP_RAISE_INVALID_OPCODE();
3351 }
3352 IEMOP_HLP_DONE_DECODING();
3353
3354 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3355}
3356
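/*
 * Editor's note: the lock-prefix handling above implements AMD's alternative
 * MOV CR8 encoding (gated on fMovCr8In32Bit), which lets 32-bit code reach
 * CR8 without a REX prefix.  The two encodings of the same access:
 */
#if 0 /* illustrative only, not part of the build */
/* f0 0f 20 c0 = mov eax, cr8 (LOCK 0f 20 /0; #UD unless fMovCr8In32Bit) */
/* 44 0f 20 c0 = mov rax, cr8 (REX.R extends the reg field to CR8)       */
#endif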
3357
3358/** Opcode 0x0f 0x21. */
3359FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3360{
3361 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3362 IEMOP_HLP_MIN_386();
3363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3365 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3366 return IEMOP_RAISE_INVALID_OPCODE();
3367 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3368 IEM_GET_MODRM_RM(pVCpu, bRm),
3369 IEM_GET_MODRM_REG_8(bRm));
3370}
3371
3372
3373/** Opcode 0x0f 0x22. */
3374FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3375{
3376 /* mod is ignored, as are operand-size overrides. */
3377 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3378 IEMOP_HLP_MIN_386();
3379 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3380 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3381 else
3382 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3383
3384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3385 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3386 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3387 {
3388 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3389 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3390 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3391 iCrReg |= 8;
3392 }
3393 switch (iCrReg)
3394 {
3395 case 0: case 2: case 3: case 4: case 8:
3396 break;
3397 default:
3398 return IEMOP_RAISE_INVALID_OPCODE();
3399 }
3400 IEMOP_HLP_DONE_DECODING();
3401
3402 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3403}
3404
3405
3406/** Opcode 0x0f 0x23. */
3407FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3408{
3409 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3410 IEMOP_HLP_MIN_386();
3411 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3413 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3414 return IEMOP_RAISE_INVALID_OPCODE();
3415 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3416 IEM_GET_MODRM_REG_8(bRm),
3417 IEM_GET_MODRM_RM(pVCpu, bRm));
3418}
3419
3420
3421/** Opcode 0x0f 0x24. */
3422FNIEMOP_DEF(iemOp_mov_Rd_Td)
3423{
3424 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3425 IEMOP_HLP_MIN_386();
3426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3428 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3429 return IEMOP_RAISE_INVALID_OPCODE();
3430 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3431 IEM_GET_MODRM_RM(pVCpu, bRm),
3432 IEM_GET_MODRM_REG_8(bRm));
3433}
3434
3435
3436/** Opcode 0x0f 0x26. */
3437FNIEMOP_DEF(iemOp_mov_Td_Rd)
3438{
3439 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3440 IEMOP_HLP_MIN_386();
3441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3443 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3444 return IEMOP_RAISE_INVALID_OPCODE();
3445 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3446 IEM_GET_MODRM_REG_8(bRm),
3447 IEM_GET_MODRM_RM(pVCpu, bRm));
3448}
3449
3450
3451/**
3452 * @opcode 0x28
3453 * @oppfx none
3454 * @opcpuid sse
3455 * @opgroup og_sse_simdfp_datamove
3456 * @opxcpttype 1
3457 * @optest op1=1 op2=2 -> op1=2
3458 * @optest op1=0 op2=-42 -> op1=-42
3459 */
3460FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3461{
3462 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3464 if (IEM_IS_MODRM_REG_MODE(bRm))
3465 {
3466 /*
3467 * Register, register.
3468 */
3469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3470 IEM_MC_BEGIN(0, 0);
3471 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3472 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3473 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3474 IEM_GET_MODRM_RM(pVCpu, bRm));
3475 IEM_MC_ADVANCE_RIP();
3476 IEM_MC_END();
3477 }
3478 else
3479 {
3480 /*
3481 * Register, memory.
3482 */
3483 IEM_MC_BEGIN(0, 2);
3484 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3486
3487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3489 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3490 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3491
3492 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3493 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3494
3495 IEM_MC_ADVANCE_RIP();
3496 IEM_MC_END();
3497 }
3498 return VINF_SUCCESS;
3499}
3500
3501/**
3502 * @opcode 0x28
3503 * @oppfx 66
3504 * @opcpuid sse2
3505 * @opgroup og_sse2_pcksclr_datamove
3506 * @opxcpttype 1
3507 * @optest op1=1 op2=2 -> op1=2
3508 * @optest op1=0 op2=-42 -> op1=-42
3509 */
3510FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3511{
3512 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3514 if (IEM_IS_MODRM_REG_MODE(bRm))
3515 {
3516 /*
3517 * Register, register.
3518 */
3519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3520 IEM_MC_BEGIN(0, 0);
3521 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3522 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3523 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3524 IEM_GET_MODRM_RM(pVCpu, bRm));
3525 IEM_MC_ADVANCE_RIP();
3526 IEM_MC_END();
3527 }
3528 else
3529 {
3530 /*
3531 * Register, memory.
3532 */
3533 IEM_MC_BEGIN(0, 2);
3534 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3536
3537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3539 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3540 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3541
3542 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3543 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3544
3545 IEM_MC_ADVANCE_RIP();
3546 IEM_MC_END();
3547 }
3548 return VINF_SUCCESS;
3549}
3550
3551/* Opcode 0xf3 0x0f 0x28 - invalid */
3552/* Opcode 0xf2 0x0f 0x28 - invalid */
3553
3554/**
3555 * @opcode 0x29
3556 * @oppfx none
3557 * @opcpuid sse
3558 * @opgroup og_sse_simdfp_datamove
3559 * @opxcpttype 1
3560 * @optest op1=1 op2=2 -> op1=2
3561 * @optest op1=0 op2=-42 -> op1=-42
3562 */
3563FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3564{
3565 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3567 if (IEM_IS_MODRM_REG_MODE(bRm))
3568 {
3569 /*
3570 * Register, register.
3571 */
3572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3573 IEM_MC_BEGIN(0, 0);
3574 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3575 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3576 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3577 IEM_GET_MODRM_REG(pVCpu, bRm));
3578 IEM_MC_ADVANCE_RIP();
3579 IEM_MC_END();
3580 }
3581 else
3582 {
3583 /*
3584 * Memory, register.
3585 */
3586 IEM_MC_BEGIN(0, 2);
3587 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3589
3590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3592 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3593 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3594
3595 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3596 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3597
3598 IEM_MC_ADVANCE_RIP();
3599 IEM_MC_END();
3600 }
3601 return VINF_SUCCESS;
3602}
3603
3604/**
3605 * @opcode 0x29
3606 * @oppfx 66
3607 * @opcpuid sse2
3608 * @opgroup og_sse2_pcksclr_datamove
3609 * @opxcpttype 1
3610 * @optest op1=1 op2=2 -> op1=2
3611 * @optest op1=0 op2=-42 -> op1=-42
3612 */
3613FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3614{
3615 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3617 if (IEM_IS_MODRM_REG_MODE(bRm))
3618 {
3619 /*
3620 * Register, register.
3621 */
3622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3623 IEM_MC_BEGIN(0, 0);
3624 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3625 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3626 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3627 IEM_GET_MODRM_REG(pVCpu, bRm));
3628 IEM_MC_ADVANCE_RIP();
3629 IEM_MC_END();
3630 }
3631 else
3632 {
3633 /*
3634 * Memory, register.
3635 */
3636 IEM_MC_BEGIN(0, 2);
3637 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3639
3640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3643 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3644
3645 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3646 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3647
3648 IEM_MC_ADVANCE_RIP();
3649 IEM_MC_END();
3650 }
3651 return VINF_SUCCESS;
3652}
3653
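/*
 * Editor's note: in contrast to the movups/movupd forms earlier, the four
 * aligned moves above go through the _ALIGN_SSE fetch/store helpers, so a
 * misaligned effective address faults instead of being tolerated.  The
 * check they model:
 */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
# include <stdbool.h>

static bool movapsAddrOkSketch(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* misaligned movaps/movapd raise #GP(0) */
}
#endif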
3654/* Opcode 0xf3 0x0f 0x29 - invalid */
3655/* Opcode 0xf2 0x0f 0x29 - invalid */
3656
3657
3658/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3659FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3660{
3661 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3663 if (IEM_IS_MODRM_REG_MODE(bRm))
3664 {
3665 /*
3666 * Register, register.
3667 */
3668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3669
3670 IEM_MC_BEGIN(3, 1);
3671 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3672 IEM_MC_LOCAL(X86XMMREG, Dst);
3673 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3674 IEM_MC_ARG(uint64_t, u64Src, 2);
3675 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3676 IEM_MC_PREPARE_FPU_USAGE();
3677 IEM_MC_FPU_TO_MMX_MODE();
3678
3679 IEM_MC_REF_MXCSR(pfMxcsr);
3680 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3681 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3682
3683 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3684 IEM_MC_IF_MXCSR_XCPT_PENDING()
3685 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3686 IEM_MC_ELSE()
3687 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3688 IEM_MC_ENDIF();
3689
3690 IEM_MC_ADVANCE_RIP();
3691 IEM_MC_END();
3692 }
3693 else
3694 {
3695 /*
3696 * Register, memory.
3697 */
3698 IEM_MC_BEGIN(3, 3);
3699 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3700 IEM_MC_LOCAL(X86XMMREG, Dst);
3701 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3702 IEM_MC_ARG(uint64_t, u64Src, 2);
3703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3704
3705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3707 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3708 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3709
3710 IEM_MC_PREPARE_FPU_USAGE();
3711 IEM_MC_FPU_TO_MMX_MODE();
3712 IEM_MC_REF_MXCSR(pfMxcsr);
3713 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Dst must be fetched first: the high quadword is preserved. */
3714 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3715 IEM_MC_IF_MXCSR_XCPT_PENDING()
3716 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3717 IEM_MC_ELSE()
3718 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3719 IEM_MC_ENDIF();
3720
3721 IEM_MC_ADVANCE_RIP();
3722 IEM_MC_END();
3723 }
3724 return VINF_SUCCESS;
3725}
3726
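/*
 * Editor's note: a reference model for the conversion above (rounding really
 * follows MXCSR.RC, round-to-nearest assumed here).  cvtpi2ps fills only the
 * low two singles, which is why the destination has to be fetched first; the
 * cvtpi2pd form below writes the whole register and does not need that.
 */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>

static void cvtpi2psSketch(float aDst[4], int32_t const aSrc[2])
{
    aDst[0] = (float)aSrc[0];
    aDst[1] = (float)aSrc[1];
    /* aDst[2] and aDst[3] are left untouched. */
}
#endif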
3727
3728/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3729FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3730{
3731 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3732 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3733 if (IEM_IS_MODRM_REG_MODE(bRm))
3734 {
3735 /*
3736 * Register, register.
3737 */
3738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3739
3740 IEM_MC_BEGIN(3, 1);
3741 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3742 IEM_MC_LOCAL(X86XMMREG, Dst);
3743 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3744 IEM_MC_ARG(uint64_t, u64Src, 2);
3745 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3746 IEM_MC_PREPARE_FPU_USAGE();
3747 IEM_MC_FPU_TO_MMX_MODE();
3748
3749 IEM_MC_REF_MXCSR(pfMxcsr);
3750 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3751
3752 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3753 IEM_MC_IF_MXCSR_XCPT_PENDING()
3754 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3755 IEM_MC_ELSE()
3756 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3757 IEM_MC_ENDIF();
3758
3759 IEM_MC_ADVANCE_RIP();
3760 IEM_MC_END();
3761 }
3762 else
3763 {
3764 /*
3765 * Register, memory.
3766 */
3767 IEM_MC_BEGIN(3, 3);
3768 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3769 IEM_MC_LOCAL(X86XMMREG, Dst);
3770 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3771 IEM_MC_ARG(uint64_t, u64Src, 2);
3772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3773
3774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3777 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3778
3779 /* Doesn't cause a transition to MMX mode. */
3780 IEM_MC_PREPARE_SSE_USAGE();
3781 IEM_MC_REF_MXCSR(pfMxcsr);
3782
3783 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3784 IEM_MC_IF_MXCSR_XCPT_PENDING()
3785 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3786 IEM_MC_ELSE()
3787 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3788 IEM_MC_ENDIF();
3789
3790 IEM_MC_ADVANCE_RIP();
3791 IEM_MC_END();
3792 }
3793 return VINF_SUCCESS;
3794}
3795
3796
3797/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3798FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3799{
3800 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3801
3802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3803 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3804 {
3805 if (IEM_IS_MODRM_REG_MODE(bRm))
3806 {
3807 /* XMM, greg64 */
3808 IEM_MC_BEGIN(3, 4);
3809 IEM_MC_LOCAL(uint32_t, fMxcsr);
3810 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3811 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3812 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3813 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3814
3815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3816 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3817 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3818
3819 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3820 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3821 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3822 IEM_MC_IF_MXCSR_XCPT_PENDING()
3823 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3824 IEM_MC_ELSE()
3825 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3826 IEM_MC_ENDIF();
3827
3828 IEM_MC_ADVANCE_RIP();
3829 IEM_MC_END();
3830 }
3831 else
3832 {
3833 /* XMM, [mem64] */
3834 IEM_MC_BEGIN(3, 4);
3835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3836 IEM_MC_LOCAL(uint32_t, fMxcsr);
3837 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3838 IEM_MC_LOCAL(int64_t, i64Src);
3839 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3840 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3841 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3842
3843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3845 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3846 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3847
3848 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3849 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3850 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3851 IEM_MC_IF_MXCSR_XCPT_PENDING()
3852 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3853 IEM_MC_ELSE()
3854 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3855 IEM_MC_ENDIF();
3856
3857 IEM_MC_ADVANCE_RIP();
3858 IEM_MC_END();
3859 }
3860 }
3861 else
3862 {
3863 if (IEM_IS_MODRM_REG_MODE(bRm))
3864 {
3865 /* XMM, greg32 */
3866 IEM_MC_BEGIN(3, 4);
3867 IEM_MC_LOCAL(uint32_t, fMxcsr);
3868 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3869 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3870 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3871 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3872
3873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3874 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3875 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3876
3877 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3878 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3879 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3880 IEM_MC_IF_MXCSR_XCPT_PENDING()
3881 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3882 IEM_MC_ELSE()
3883 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3884 IEM_MC_ENDIF();
3885
3886 IEM_MC_ADVANCE_RIP();
3887 IEM_MC_END();
3888 }
3889 else
3890 {
3891 /* XMM, [mem32] */
3892 IEM_MC_BEGIN(3, 4);
3893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3894 IEM_MC_LOCAL(uint32_t, fMxcsr);
3895 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3896 IEM_MC_LOCAL(int32_t, i32Src);
3897 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3898 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3899 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3900
3901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3903 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3904 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3905
3906 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3907 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3908 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3909 IEM_MC_IF_MXCSR_XCPT_PENDING()
3910 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3911 IEM_MC_ELSE()
3912 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3913 IEM_MC_ENDIF();
3914
3915 IEM_MC_ADVANCE_RIP();
3916 IEM_MC_END();
3917 }
3918 }
3919 return VINF_SUCCESS;
3920}
3921
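/* Note: For cvtsi2ss/cvtsi2sd (and the cvt*2si/cvtt*2si forms below) the
   REX.W prefix selects the 64-bit integer operand, so decoding splits on
   IEM_OP_PRF_SIZE_REX_W before the usual register/memory split. */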
3922
3923/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3924FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3925{
3926 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3927
3928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3929 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3930 {
3931 if (IEM_IS_MODRM_REG_MODE(bRm))
3932 {
3933 /* XMM, greg64 */
3934 IEM_MC_BEGIN(3, 4);
3935 IEM_MC_LOCAL(uint32_t, fMxcsr);
3936 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3937 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3938 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3939 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3940
3941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3942 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3943 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3944
3945 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3946 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3947 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3948 IEM_MC_IF_MXCSR_XCPT_PENDING()
3949 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3950 IEM_MC_ELSE()
3951 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3952 IEM_MC_ENDIF();
3953
3954 IEM_MC_ADVANCE_RIP();
3955 IEM_MC_END();
3956 }
3957 else
3958 {
3959 /* XMM, [mem64] */
3960 IEM_MC_BEGIN(3, 4);
3961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3962 IEM_MC_LOCAL(uint32_t, fMxcsr);
3963 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3964 IEM_MC_LOCAL(int64_t, i64Src);
3965 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3966 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3967 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3968
3969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3971 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3972 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3973
3974 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3975 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3976 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3977 IEM_MC_IF_MXCSR_XCPT_PENDING()
3978 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3979 IEM_MC_ELSE()
3980 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3981 IEM_MC_ENDIF();
3982
3983 IEM_MC_ADVANCE_RIP();
3984 IEM_MC_END();
3985 }
3986 }
3987 else
3988 {
3989 if (IEM_IS_MODRM_REG_MODE(bRm))
3990 {
3991 /* XMM, greg32 */
3992 IEM_MC_BEGIN(3, 4);
3993 IEM_MC_LOCAL(uint32_t, fMxcsr);
3994 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3995 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3996 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3997 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3998
3999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4000 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4001 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4002
4003 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
4004 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
4005 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4006 IEM_MC_IF_MXCSR_XCPT_PENDING()
4007 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4008 IEM_MC_ELSE()
4009 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
4010 IEM_MC_ENDIF();
4011
4012 IEM_MC_ADVANCE_RIP();
4013 IEM_MC_END();
4014 }
4015 else
4016 {
4017 /* XMM, [mem32] */
4018 IEM_MC_BEGIN(3, 4);
4019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4020 IEM_MC_LOCAL(uint32_t, fMxcsr);
4021 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
4022 IEM_MC_LOCAL(int32_t, i32Src);
4023 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4024 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
4025 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
4026
4027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4029 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4030 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4031
4032 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4033 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
4034 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4035 IEM_MC_IF_MXCSR_XCPT_PENDING()
4036 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4037 IEM_MC_ELSE()
4038 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
4039 IEM_MC_ENDIF();
4040
4041 IEM_MC_ADVANCE_RIP();
4042 IEM_MC_END();
4043 }
4044 }
4045 return VINF_SUCCESS;
4046}
4047
4048
4049/**
4050 * @opcode 0x2b
4051 * @opcodesub !11 mr/reg
4052 * @oppfx none
4053 * @opcpuid sse
4054 * @opgroup og_sse1_cachect
4055 * @opxcpttype 1
4056 * @optest op1=1 op2=2 -> op1=2
4057 * @optest op1=0 op2=-42 -> op1=-42
4058 */
4059FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4060{
4061 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4063 if (IEM_IS_MODRM_MEM_MODE(bRm))
4064 {
4065 /*
4066 * memory, register.
4067 */
4068 IEM_MC_BEGIN(0, 2);
4069 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4071
4072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4074 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4075 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4076
4077 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4078 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4079
4080 IEM_MC_ADVANCE_RIP();
4081 IEM_MC_END();
4082 }
4083 /* The register, register encoding is invalid. */
4084 else
4085 return IEMOP_RAISE_INVALID_OPCODE();
4086 return VINF_SUCCESS;
4087}
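/* Note: The non-temporal hint of movntps/movntpd is not modelled here; IEM
   performs a plain aligned 16-byte store (with #GP on misalignment), which
   is architecturally permissible since the hint only affects caching. */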
4088
4089/**
4090 * @opcode 0x2b
4091 * @opcodesub !11 mr/reg
4092 * @oppfx 0x66
4093 * @opcpuid sse2
4094 * @opgroup og_sse2_cachect
4095 * @opxcpttype 1
4096 * @optest op1=1 op2=2 -> op1=2
4097 * @optest op1=0 op2=-42 -> op1=-42
4098 */
4099FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4100{
4101 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4103 if (IEM_IS_MODRM_MEM_MODE(bRm))
4104 {
4105 /*
4106 * memory, register.
4107 */
4108 IEM_MC_BEGIN(0, 2);
4109 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4111
4112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4114 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4115 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4116
4117 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4118 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4119
4120 IEM_MC_ADVANCE_RIP();
4121 IEM_MC_END();
4122 }
4123 /* The register, register encoding is invalid. */
4124 else
4125 return IEMOP_RAISE_INVALID_OPCODE();
4126 return VINF_SUCCESS;
4127}
4128/* Opcode 0xf3 0x0f 0x2b - invalid */
4129/* Opcode 0xf2 0x0f 0x2b - invalid */
4130
4131
4132/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4133FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4134{
4135 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4137 if (IEM_IS_MODRM_REG_MODE(bRm))
4138 {
4139 /*
4140 * Register, register.
4141 */
4142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4143
4144 IEM_MC_BEGIN(3, 1);
4145 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4146 IEM_MC_LOCAL(uint64_t, u64Dst);
4147 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4148 IEM_MC_ARG(uint64_t, u64Src, 2);
4149 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4150 IEM_MC_PREPARE_FPU_USAGE();
4151 IEM_MC_FPU_TO_MMX_MODE();
4152
4153 IEM_MC_REF_MXCSR(pfMxcsr);
4154 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4155
4156 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4157 IEM_MC_IF_MXCSR_XCPT_PENDING()
4158 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4159 IEM_MC_ELSE()
4160 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4161 IEM_MC_ENDIF();
4162
4163 IEM_MC_ADVANCE_RIP();
4164 IEM_MC_END();
4165 }
4166 else
4167 {
4168 /*
4169 * Register, memory.
4170 */
4171 IEM_MC_BEGIN(3, 2);
4172 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4173 IEM_MC_LOCAL(uint64_t, u64Dst);
4174 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4175 IEM_MC_ARG(uint64_t, u64Src, 2);
4176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4177
4178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4180 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4181 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4182
4183 IEM_MC_PREPARE_FPU_USAGE();
4184 IEM_MC_FPU_TO_MMX_MODE();
4185 IEM_MC_REF_MXCSR(pfMxcsr);
4186
4187 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4188 IEM_MC_IF_MXCSR_XCPT_PENDING()
4189 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4190 IEM_MC_ELSE()
4191 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4192 IEM_MC_ENDIF();
4193
4194 IEM_MC_ADVANCE_RIP();
4195 IEM_MC_END();
4196 }
4197 return VINF_SUCCESS;
4198}
4199
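/* Note: The cvtt* encodings at 0x2c always truncate (round towards zero)
   regardless of MXCSR.RC, whereas the cvt* encodings at 0x2d honour the
   rounding control; both share the MXCSR exception-pending commit logic. */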
4200
4201/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4202FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4203{
4204 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4206 if (IEM_IS_MODRM_REG_MODE(bRm))
4207 {
4208 /*
4209 * Register, register.
4210 */
4211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4212
4213 IEM_MC_BEGIN(3, 1);
4214 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4215 IEM_MC_LOCAL(uint64_t, u64Dst);
4216 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4217 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4218 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4219 IEM_MC_PREPARE_FPU_USAGE();
4220 IEM_MC_FPU_TO_MMX_MODE();
4221
4222 IEM_MC_REF_MXCSR(pfMxcsr);
4223 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4224
4225 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4226 IEM_MC_IF_MXCSR_XCPT_PENDING()
4227 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4228 IEM_MC_ELSE()
4229 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4230 IEM_MC_ENDIF();
4231
4232 IEM_MC_ADVANCE_RIP();
4233 IEM_MC_END();
4234 }
4235 else
4236 {
4237 /*
4238 * Register, memory.
4239 */
4240 IEM_MC_BEGIN(3, 3);
4241 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4242 IEM_MC_LOCAL(uint64_t, u64Dst);
4243 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4244 IEM_MC_LOCAL(X86XMMREG, uSrc);
4245 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4247
4248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4250 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4251 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4252
4253 IEM_MC_PREPARE_FPU_USAGE();
4254 IEM_MC_FPU_TO_MMX_MODE();
4255
4256 IEM_MC_REF_MXCSR(pfMxcsr);
4257
4258 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4259 IEM_MC_IF_MXCSR_XCPT_PENDING()
4260 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4261 IEM_MC_ELSE()
4262 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4263 IEM_MC_ENDIF();
4264
4265 IEM_MC_ADVANCE_RIP();
4266 IEM_MC_END();
4267 }
4268 return VINF_SUCCESS;
4269}
4270
4271
4272/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4273FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4274{
4275 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4276
4277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4278 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4279 {
4280 if (IEM_IS_MODRM_REG_MODE(bRm))
4281 {
4282 /* greg64, XMM */
4283 IEM_MC_BEGIN(3, 4);
4284 IEM_MC_LOCAL(uint32_t, fMxcsr);
4285 IEM_MC_LOCAL(int64_t, i64Dst);
4286 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4287 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4288 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4289
4290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4291 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4292 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4293
4294 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4295 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4296 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4297 IEM_MC_IF_MXCSR_XCPT_PENDING()
4298 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4299 IEM_MC_ELSE()
4300 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4301 IEM_MC_ENDIF();
4302
4303 IEM_MC_ADVANCE_RIP();
4304 IEM_MC_END();
4305 }
4306 else
4307 {
4308 /* greg64, [mem32] */
4309 IEM_MC_BEGIN(3, 4);
4310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4311 IEM_MC_LOCAL(uint32_t, fMxcsr);
4312 IEM_MC_LOCAL(int64_t, i64Dst);
4313 IEM_MC_LOCAL(uint32_t, u32Src);
4314 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4315 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4316 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4317
4318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4320 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4321 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4322
4323 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4324 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4325 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4326 IEM_MC_IF_MXCSR_XCPT_PENDING()
4327 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4328 IEM_MC_ELSE()
4329 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4330 IEM_MC_ENDIF();
4331
4332 IEM_MC_ADVANCE_RIP();
4333 IEM_MC_END();
4334 }
4335 }
4336 else
4337 {
4338 if (IEM_IS_MODRM_REG_MODE(bRm))
4339 {
4340 /* greg32, XMM */
4341 IEM_MC_BEGIN(3, 4);
4342 IEM_MC_LOCAL(uint32_t, fMxcsr);
4343 IEM_MC_LOCAL(int32_t, i32Dst);
4344 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4345 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4346 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4347
4348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4350 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4351
4352 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4353 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4354 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4355 IEM_MC_IF_MXCSR_XCPT_PENDING()
4356 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4357 IEM_MC_ELSE()
4358 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4359 IEM_MC_ENDIF();
4360
4361 IEM_MC_ADVANCE_RIP();
4362 IEM_MC_END();
4363 }
4364 else
4365 {
4366 /* greg32, [mem32] */
4367 IEM_MC_BEGIN(3, 4);
4368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4369 IEM_MC_LOCAL(uint32_t, fMxcsr);
4370 IEM_MC_LOCAL(int32_t, i32Dst);
4371 IEM_MC_LOCAL(uint32_t, u32Src);
4372 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4373 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4374 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4375
4376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4378 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4379 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4380
4381 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4382 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4383 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4384 IEM_MC_IF_MXCSR_XCPT_PENDING()
4385 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4386 IEM_MC_ELSE()
4387 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4388 IEM_MC_ENDIF();
4389
4390 IEM_MC_ADVANCE_RIP();
4391 IEM_MC_END();
4392 }
4393 }
4394 return VINF_SUCCESS;
4395}
4396
4397
4398/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4399FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4400{
4401 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4402
4403 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4404 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4405 {
4406 if (IEM_IS_MODRM_REG_MODE(bRm))
4407 {
4408 /* greg64, XMM */
4409 IEM_MC_BEGIN(3, 4);
4410 IEM_MC_LOCAL(uint32_t, fMxcsr);
4411 IEM_MC_LOCAL(int64_t, i64Dst);
4412 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4413 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4414 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4415
4416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4417 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4418 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4419
4420 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4421 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4422 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4423 IEM_MC_IF_MXCSR_XCPT_PENDING()
4424 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4425 IEM_MC_ELSE()
4426 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4427 IEM_MC_ENDIF();
4428
4429 IEM_MC_ADVANCE_RIP();
4430 IEM_MC_END();
4431 }
4432 else
4433 {
4434 /* greg64, [mem64] */
4435 IEM_MC_BEGIN(3, 4);
4436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4437 IEM_MC_LOCAL(uint32_t, fMxcsr);
4438 IEM_MC_LOCAL(int64_t, i64Dst);
4439 IEM_MC_LOCAL(uint64_t, u64Src);
4440 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4441 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4442 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4443
4444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4446 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4447 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4448
4449 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4450 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4451 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4452 IEM_MC_IF_MXCSR_XCPT_PENDING()
4453 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4454 IEM_MC_ELSE()
4455 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4456 IEM_MC_ENDIF();
4457
4458 IEM_MC_ADVANCE_RIP();
4459 IEM_MC_END();
4460 }
4461 }
4462 else
4463 {
4464 if (IEM_IS_MODRM_REG_MODE(bRm))
4465 {
4466 /* greg32, XMM */
4467 IEM_MC_BEGIN(3, 4);
4468 IEM_MC_LOCAL(uint32_t, fMxcsr);
4469 IEM_MC_LOCAL(int32_t, i32Dst);
4470 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4471 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4472 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4473
4474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4475 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4476 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4477
4478 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4479 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4480 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4481 IEM_MC_IF_MXCSR_XCPT_PENDING()
4482 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4483 IEM_MC_ELSE()
4484 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4485 IEM_MC_ENDIF();
4486
4487 IEM_MC_ADVANCE_RIP();
4488 IEM_MC_END();
4489 }
4490 else
4491 {
4492 /* greg32, [mem64] */
4493 IEM_MC_BEGIN(3, 4);
4494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4495 IEM_MC_LOCAL(uint32_t, fMxcsr);
4496 IEM_MC_LOCAL(int32_t, i32Dst);
4497 IEM_MC_LOCAL(uint64_t, u64Src);
4498 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4499 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4500 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4501
4502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4504 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4505 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4506
4507 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4508 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4509 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4510 IEM_MC_IF_MXCSR_XCPT_PENDING()
4511 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4512 IEM_MC_ELSE()
4513 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4514 IEM_MC_ENDIF();
4515
4516 IEM_MC_ADVANCE_RIP();
4517 IEM_MC_END();
4518 }
4519 }
4520 return VINF_SUCCESS;
4521}
4522
4523
4524/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4525FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4526{
4527 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4529 if (IEM_IS_MODRM_REG_MODE(bRm))
4530 {
4531 /*
4532 * Register, register.
4533 */
4534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4535
4536 IEM_MC_BEGIN(3, 1);
4537 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4538 IEM_MC_LOCAL(uint64_t, u64Dst);
4539 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4540 IEM_MC_ARG(uint64_t, u64Src, 2);
4541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4542 IEM_MC_PREPARE_FPU_USAGE();
4543 IEM_MC_FPU_TO_MMX_MODE();
4544
4545 IEM_MC_REF_MXCSR(pfMxcsr);
4546 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4547
4548 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4549 IEM_MC_IF_MXCSR_XCPT_PENDING()
4550 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4551 IEM_MC_ELSE()
4552 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4553 IEM_MC_ENDIF();
4554
4555 IEM_MC_ADVANCE_RIP();
4556 IEM_MC_END();
4557 }
4558 else
4559 {
4560 /*
4561 * Register, memory.
4562 */
4563 IEM_MC_BEGIN(3, 2);
4564 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4565 IEM_MC_LOCAL(uint64_t, u64Dst);
4566 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4567 IEM_MC_ARG(uint64_t, u64Src, 2);
4568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4569
4570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4572 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4573 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4574
4575 IEM_MC_PREPARE_FPU_USAGE();
4576 IEM_MC_FPU_TO_MMX_MODE();
4577 IEM_MC_REF_MXCSR(pfMxcsr);
4578
4579 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4580 IEM_MC_IF_MXCSR_XCPT_PENDING()
4581 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4582 IEM_MC_ELSE()
4583 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4584 IEM_MC_ENDIF();
4585
4586 IEM_MC_ADVANCE_RIP();
4587 IEM_MC_END();
4588 }
4589 return VINF_SUCCESS;
4590}
4591
4592
4593/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4594FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4595{
4596 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4598 if (IEM_IS_MODRM_REG_MODE(bRm))
4599 {
4600 /*
4601 * Register, register.
4602 */
4603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4604
4605 IEM_MC_BEGIN(3, 1);
4606 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4607 IEM_MC_LOCAL(uint64_t, u64Dst);
4608 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4609 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4610 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4611 IEM_MC_PREPARE_FPU_USAGE();
4612 IEM_MC_FPU_TO_MMX_MODE();
4613
4614 IEM_MC_REF_MXCSR(pfMxcsr);
4615 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4616
4617 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4618 IEM_MC_IF_MXCSR_XCPT_PENDING()
4619 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4620 IEM_MC_ELSE()
4621 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4622 IEM_MC_ENDIF();
4623
4624 IEM_MC_ADVANCE_RIP();
4625 IEM_MC_END();
4626 }
4627 else
4628 {
4629 /*
4630 * Register, memory.
4631 */
4632 IEM_MC_BEGIN(3, 3);
4633 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4634 IEM_MC_LOCAL(uint64_t, u64Dst);
4635 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4636 IEM_MC_LOCAL(X86XMMREG, uSrc);
4637 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4639
4640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4643 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4644
4645 IEM_MC_PREPARE_FPU_USAGE();
4646 IEM_MC_FPU_TO_MMX_MODE();
4647
4648 IEM_MC_REF_MXCSR(pfMxcsr);
4649
4650 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4651 IEM_MC_IF_MXCSR_XCPT_PENDING()
4652 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4653 IEM_MC_ELSE()
4654 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4655 IEM_MC_ENDIF();
4656
4657 IEM_MC_ADVANCE_RIP();
4658 IEM_MC_END();
4659 }
4660 return VINF_SUCCESS;
4661}
4662
4663
4664/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4665FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4666{
4667 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4668
4669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4670 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4671 {
4672 if (IEM_IS_MODRM_REG_MODE(bRm))
4673 {
4674 /* greg64, XMM */
4675 IEM_MC_BEGIN(3, 4);
4676 IEM_MC_LOCAL(uint32_t, fMxcsr);
4677 IEM_MC_LOCAL(int64_t, i64Dst);
4678 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4679 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4680 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4681
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4683 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4684 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4685
4686 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4687 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4688 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4689 IEM_MC_IF_MXCSR_XCPT_PENDING()
4690 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4691 IEM_MC_ELSE()
4692 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4693 IEM_MC_ENDIF();
4694
4695 IEM_MC_ADVANCE_RIP();
4696 IEM_MC_END();
4697 }
4698 else
4699 {
4700 /* greg64, [mem32] */
4701 IEM_MC_BEGIN(3, 4);
4702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4703 IEM_MC_LOCAL(uint32_t, fMxcsr);
4704 IEM_MC_LOCAL(int64_t, i64Dst);
4705 IEM_MC_LOCAL(uint32_t, u32Src);
4706 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4707 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4708 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4709
4710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4712 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4713 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4714
4715 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4716 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4717 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4718 IEM_MC_IF_MXCSR_XCPT_PENDING()
4719 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4720 IEM_MC_ELSE()
4721 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4722 IEM_MC_ENDIF();
4723
4724 IEM_MC_ADVANCE_RIP();
4725 IEM_MC_END();
4726 }
4727 }
4728 else
4729 {
4730 if (IEM_IS_MODRM_REG_MODE(bRm))
4731 {
4732 /* greg32, XMM */
4733 IEM_MC_BEGIN(3, 4);
4734 IEM_MC_LOCAL(uint32_t, fMxcsr);
4735 IEM_MC_LOCAL(int32_t, i32Dst);
4736 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4737 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4738 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4739
4740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4741 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4742 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4743
4744 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4745 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4746 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4747 IEM_MC_IF_MXCSR_XCPT_PENDING()
4748 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4749 IEM_MC_ELSE()
4750 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4751 IEM_MC_ENDIF();
4752
4753 IEM_MC_ADVANCE_RIP();
4754 IEM_MC_END();
4755 }
4756 else
4757 {
4758 /* greg32, [mem32] */
4759 IEM_MC_BEGIN(3, 4);
4760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4761 IEM_MC_LOCAL(uint32_t, fMxcsr);
4762 IEM_MC_LOCAL(int32_t, i32Dst);
4763 IEM_MC_LOCAL(uint32_t, u32Src);
4764 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4765 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4766 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4767
4768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4770 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4771 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4772
4773 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4774 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4775 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4776 IEM_MC_IF_MXCSR_XCPT_PENDING()
4777 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4778 IEM_MC_ELSE()
4779 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4780 IEM_MC_ENDIF();
4781
4782 IEM_MC_ADVANCE_RIP();
4783 IEM_MC_END();
4784 }
4785 }
4786 return VINF_SUCCESS;
4787}
4788
4789
4790/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4791FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4792{
4793 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4794
4795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4796 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4797 {
4798 if (IEM_IS_MODRM_REG_MODE(bRm))
4799 {
4800 /* greg64, XMM */
4801 IEM_MC_BEGIN(3, 4);
4802 IEM_MC_LOCAL(uint32_t, fMxcsr);
4803 IEM_MC_LOCAL(int64_t, i64Dst);
4804 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4805 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4806 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4807
4808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4809 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4810 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4811
4812 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4813 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4814 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4815 IEM_MC_IF_MXCSR_XCPT_PENDING()
4816 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4817 IEM_MC_ELSE()
4818 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4819 IEM_MC_ENDIF();
4820
4821 IEM_MC_ADVANCE_RIP();
4822 IEM_MC_END();
4823 }
4824 else
4825 {
4826 /* greg64, [mem64] */
4827 IEM_MC_BEGIN(3, 4);
4828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4829 IEM_MC_LOCAL(uint32_t, fMxcsr);
4830 IEM_MC_LOCAL(int64_t, i64Dst);
4831 IEM_MC_LOCAL(uint64_t, u64Src);
4832 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4833 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4834 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4835
4836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4838 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4839 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4840
4841 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4842 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4843 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4844 IEM_MC_IF_MXCSR_XCPT_PENDING()
4845 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4846 IEM_MC_ELSE()
4847 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4848 IEM_MC_ENDIF();
4849
4850 IEM_MC_ADVANCE_RIP();
4851 IEM_MC_END();
4852 }
4853 }
4854 else
4855 {
4856 if (IEM_IS_MODRM_REG_MODE(bRm))
4857 {
4858 /* greg32, XMM */
4859 IEM_MC_BEGIN(3, 4);
4860 IEM_MC_LOCAL(uint32_t, fMxcsr);
4861 IEM_MC_LOCAL(int32_t, i32Dst);
4862 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4863 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4864 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4865
4866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4867 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4868 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4869
4870 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4871 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4872 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4873 IEM_MC_IF_MXCSR_XCPT_PENDING()
4874 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4875 IEM_MC_ELSE()
4876 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4877 IEM_MC_ENDIF();
4878
4879 IEM_MC_ADVANCE_RIP();
4880 IEM_MC_END();
4881 }
4882 else
4883 {
4884 /* greg32, [mem64] */
4885 IEM_MC_BEGIN(3, 4);
4886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4887 IEM_MC_LOCAL(uint32_t, fMxcsr);
4888 IEM_MC_LOCAL(int32_t, i32Dst);
4889 IEM_MC_LOCAL(uint64_t, u64Src);
4890 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4891 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4892 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4893
4894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4896 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4897 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4898
4899 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4900 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4901 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4902 IEM_MC_IF_MXCSR_XCPT_PENDING()
4903 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4904 IEM_MC_ELSE()
4905 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4906 IEM_MC_ENDIF();
4907
4908 IEM_MC_ADVANCE_RIP();
4909 IEM_MC_END();
4910 }
4911 }
4912 return VINF_SUCCESS;
4913}
4914
4915
4916/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4917FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4918{
4919 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4921 if (IEM_IS_MODRM_REG_MODE(bRm))
4922 {
4923 /*
4924 * Register, register.
4925 */
4926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4927 IEM_MC_BEGIN(4, 1);
4928 IEM_MC_LOCAL(uint32_t, fEFlags);
4929 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4930 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4931 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4932 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4933 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4934 IEM_MC_PREPARE_SSE_USAGE();
4935 IEM_MC_FETCH_EFLAGS(fEFlags);
4936 IEM_MC_REF_MXCSR(pfMxcsr);
4937 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4938 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4939 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4940 IEM_MC_IF_MXCSR_XCPT_PENDING()
4941 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4942 IEM_MC_ELSE()
4943 IEM_MC_COMMIT_EFLAGS(fEFlags);
4944 IEM_MC_ENDIF();
4945
4946 IEM_MC_ADVANCE_RIP();
4947 IEM_MC_END();
4948 }
4949 else
4950 {
4951 /*
4952 * Register, memory.
4953 */
4954 IEM_MC_BEGIN(4, 3);
4955 IEM_MC_LOCAL(uint32_t, fEFlags);
4956 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4957 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4958 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4959 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4960 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4962
4963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4965 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4966 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4967
4968 IEM_MC_PREPARE_SSE_USAGE();
4969 IEM_MC_FETCH_EFLAGS(fEFlags);
4970 IEM_MC_REF_MXCSR(pfMxcsr);
4971 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4972 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4973 IEM_MC_IF_MXCSR_XCPT_PENDING()
4974 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4975 IEM_MC_ELSE()
4976 IEM_MC_COMMIT_EFLAGS(fEFlags);
4977 IEM_MC_ENDIF();
4978
4979 IEM_MC_ADVANCE_RIP();
4980 IEM_MC_END();
4981 }
4982 return VINF_SUCCESS;
4983}
4984
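/* Note: ucomiss/ucomisd raise #IA only for SNaN operands, while comiss/
   comisd at 0x2f also signal on QNaNs; both set ZF/PF/CF from the compare
   and clear OF/SF/AF, which is why the EFLAGS commit above is gated on the
   MXCSR exception check. */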
4985
4986/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4987FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4988{
4989 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4991 if (IEM_IS_MODRM_REG_MODE(bRm))
4992 {
4993 /*
4994 * Register, register.
4995 */
4996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4997 IEM_MC_BEGIN(4, 1);
4998 IEM_MC_LOCAL(uint32_t, fEFlags);
4999 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5000 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5001 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5002 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5003 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5004 IEM_MC_PREPARE_SSE_USAGE();
5005 IEM_MC_FETCH_EFLAGS(fEFlags);
5006 IEM_MC_REF_MXCSR(pfMxcsr);
5007 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5008 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5009 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5010 IEM_MC_IF_MXCSR_XCPT_PENDING()
5011 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5012 IEM_MC_ELSE()
5013 IEM_MC_COMMIT_EFLAGS(fEFlags);
5014 IEM_MC_ENDIF();
5015
5016 IEM_MC_ADVANCE_RIP();
5017 IEM_MC_END();
5018 }
5019 else
5020 {
5021 /*
5022 * Register, memory.
5023 */
5024 IEM_MC_BEGIN(4, 3);
5025 IEM_MC_LOCAL(uint32_t, fEFlags);
5026 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5027 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5028 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5029 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5030 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5032
5033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5035 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5036 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5037
5038 IEM_MC_PREPARE_SSE_USAGE();
5039 IEM_MC_FETCH_EFLAGS(fEFlags);
5040 IEM_MC_REF_MXCSR(pfMxcsr);
5041 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5042 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5043 IEM_MC_IF_MXCSR_XCPT_PENDING()
5044 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5045 IEM_MC_ELSE()
5046 IEM_MC_COMMIT_EFLAGS(fEFlags);
5047 IEM_MC_ENDIF();
5048
5049 IEM_MC_ADVANCE_RIP();
5050 IEM_MC_END();
5051 }
5052 return VINF_SUCCESS;
5053}
5054
5055
5056/* Opcode 0xf3 0x0f 0x2e - invalid */
5057/* Opcode 0xf2 0x0f 0x2e - invalid */
5058
5059
5060/** Opcode 0x0f 0x2f - comiss Vss, Wss */
5061FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5062{
5063 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5065 if (IEM_IS_MODRM_REG_MODE(bRm))
5066 {
5067 /*
5068 * Register, register.
5069 */
5070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5071 IEM_MC_BEGIN(4, 1);
5072 IEM_MC_LOCAL(uint32_t, fEFlags);
5073 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5074 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5075 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5076 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5077 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5078 IEM_MC_PREPARE_SSE_USAGE();
5079 IEM_MC_FETCH_EFLAGS(fEFlags);
5080 IEM_MC_REF_MXCSR(pfMxcsr);
5081 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5082 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5083 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5084 IEM_MC_IF_MXCSR_XCPT_PENDING()
5085 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5086 IEM_MC_ELSE()
5087 IEM_MC_COMMIT_EFLAGS(fEFlags);
5088 IEM_MC_ENDIF();
5089
5090 IEM_MC_ADVANCE_RIP();
5091 IEM_MC_END();
5092 }
5093 else
5094 {
5095 /*
5096 * Register, memory.
5097 */
5098 IEM_MC_BEGIN(4, 3);
5099 IEM_MC_LOCAL(uint32_t, fEFlags);
5100 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5101 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5102 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5103 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5104 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5106
5107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5109 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5110 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5111
5112 IEM_MC_PREPARE_SSE_USAGE();
5113 IEM_MC_FETCH_EFLAGS(fEFlags);
5114 IEM_MC_REF_MXCSR(pfMxcsr);
5115 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5116 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5117 IEM_MC_IF_MXCSR_XCPT_PENDING()
5118 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5119 IEM_MC_ELSE()
5120 IEM_MC_COMMIT_EFLAGS(fEFlags);
5121 IEM_MC_ENDIF();
5122
5123 IEM_MC_ADVANCE_RIP();
5124 IEM_MC_END();
5125 }
5126 return VINF_SUCCESS;
5127}
5128
5129
5130/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5131FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5132{
5133 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5135 if (IEM_IS_MODRM_REG_MODE(bRm))
5136 {
5137 /*
5138 * Register, register.
5139 */
5140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5141 IEM_MC_BEGIN(4, 1);
5142 IEM_MC_LOCAL(uint32_t, fEFlags);
5143 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5144 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5145 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5146 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5147 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5148 IEM_MC_PREPARE_SSE_USAGE();
5149 IEM_MC_FETCH_EFLAGS(fEFlags);
5150 IEM_MC_REF_MXCSR(pfMxcsr);
5151 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5152 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5153 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5154 IEM_MC_IF_MXCSR_XCPT_PENDING()
5155 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5156 IEM_MC_ELSE()
5157 IEM_MC_COMMIT_EFLAGS(fEFlags);
5158 IEM_MC_ENDIF();
5159
5160 IEM_MC_ADVANCE_RIP();
5161 IEM_MC_END();
5162 }
5163 else
5164 {
5165 /*
5166 * Register, memory.
5167 */
5168 IEM_MC_BEGIN(4, 3);
5169 IEM_MC_LOCAL(uint32_t, fEFlags);
5170 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5171 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5172 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5173 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5174 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5176
5177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5179 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5180 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5181
5182 IEM_MC_PREPARE_SSE_USAGE();
5183 IEM_MC_FETCH_EFLAGS(fEFlags);
5184 IEM_MC_REF_MXCSR(pfMxcsr);
5185 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5186 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5187 IEM_MC_IF_MXCSR_XCPT_PENDING()
5188 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5189 IEM_MC_ELSE()
5190 IEM_MC_COMMIT_EFLAGS(fEFlags);
5191 IEM_MC_ENDIF();
5192
5193 IEM_MC_ADVANCE_RIP();
5194 IEM_MC_END();
5195 }
5196 return VINF_SUCCESS;
5197}
5198
5199
5200/* Opcode 0xf3 0x0f 0x2f - invalid */
5201/* Opcode 0xf2 0x0f 0x2f - invalid */
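/* The system instructions that follow have side effects well beyond what
   the microcode blocks above can express, so they defer to full C
   implementations (iemCImpl_*) via IEM_MC_DEFER_TO_CIMPL_0/1. */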
5202
5203/** Opcode 0x0f 0x30. */
5204FNIEMOP_DEF(iemOp_wrmsr)
5205{
5206 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5208 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
5209}
5210
5211
5212/** Opcode 0x0f 0x31. */
5213FNIEMOP_DEF(iemOp_rdtsc)
5214{
5215 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5217 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
5218}
5219
5220
5221/** Opcode 0x0f 0x32. */
5222FNIEMOP_DEF(iemOp_rdmsr)
5223{
5224 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5226 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
5227}
5228
5229
5230/** Opcode 0x0f 0x33. */
5231FNIEMOP_DEF(iemOp_rdpmc)
5232{
5233 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5235 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
5236}
5237
5238
5239/** Opcode 0x0f 0x34. */
5240FNIEMOP_DEF(iemOp_sysenter)
5241{
5242 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5244 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
5245}
5246
5247/** Opcode 0x0f 0x35. */
5248FNIEMOP_DEF(iemOp_sysexit)
5249{
5250 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5252 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5253}
5254
5255/** Opcode 0x0f 0x37. */
5256FNIEMOP_STUB(iemOp_getsec);
5257
5258
5259/** Opcode 0x0f 0x38. */
5260FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5261{
5262#ifdef IEM_WITH_THREE_0F_38
5263 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5264 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5265#else
5266 IEMOP_BITCH_ABOUT_STUB();
5267 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5268#endif
5269}
5270
5271
5272/** Opcode 0x0f 0x3a. */
5273FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5274{
5275#ifdef IEM_WITH_THREE_0F_3A
5276 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5277 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5278#else
5279 IEMOP_BITCH_ABOUT_STUB();
5280 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5281#endif
5282}
5283
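/* Note: The three-byte tables are indexed by opcode byte times four plus
   idxPrefix, the active SIMD prefix (none/0x66/0xf3/0xf2), giving each
   escape byte four decode slots. */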
5284
5285/**
5286 * Implements a conditional move.
5287 *
5288 * Wish there was an obvious way to do this where we could share and reduce
5289 * code bloat.
5290 *
5291 * @param a_Cnd The conditional "microcode" operation.
5292 */
5293#define CMOV_X(a_Cnd) \
5294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5295 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5296 { \
5297 switch (pVCpu->iem.s.enmEffOpSize) \
5298 { \
5299 case IEMMODE_16BIT: \
5300 IEM_MC_BEGIN(0, 1); \
5301 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5302 a_Cnd { \
5303 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5304 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5305 } IEM_MC_ENDIF(); \
5306 IEM_MC_ADVANCE_RIP(); \
5307 IEM_MC_END(); \
5308 return VINF_SUCCESS; \
5309 \
5310 case IEMMODE_32BIT: \
5311 IEM_MC_BEGIN(0, 1); \
5312 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5313 a_Cnd { \
5314 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5315 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5316 } IEM_MC_ELSE() { \
5317 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5318 } IEM_MC_ENDIF(); \
5319 IEM_MC_ADVANCE_RIP(); \
5320 IEM_MC_END(); \
5321 return VINF_SUCCESS; \
5322 \
5323 case IEMMODE_64BIT: \
5324 IEM_MC_BEGIN(0, 1); \
5325 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5326 a_Cnd { \
5327 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5328 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5329 } IEM_MC_ENDIF(); \
5330 IEM_MC_ADVANCE_RIP(); \
5331 IEM_MC_END(); \
5332 return VINF_SUCCESS; \
5333 \
5334 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5335 } \
5336 } \
5337 else \
5338 { \
5339 switch (pVCpu->iem.s.enmEffOpSize) \
5340 { \
5341 case IEMMODE_16BIT: \
5342 IEM_MC_BEGIN(0, 2); \
5343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5344 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5346 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5347 a_Cnd { \
5348 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5349 } IEM_MC_ENDIF(); \
5350 IEM_MC_ADVANCE_RIP(); \
5351 IEM_MC_END(); \
5352 return VINF_SUCCESS; \
5353 \
5354 case IEMMODE_32BIT: \
5355 IEM_MC_BEGIN(0, 2); \
5356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5357 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5359 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5360 a_Cnd { \
5361 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5362 } IEM_MC_ELSE() { \
5363 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5364 } IEM_MC_ENDIF(); \
5365 IEM_MC_ADVANCE_RIP(); \
5366 IEM_MC_END(); \
5367 return VINF_SUCCESS; \
5368 \
5369 case IEMMODE_64BIT: \
5370 IEM_MC_BEGIN(0, 2); \
5371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5372 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5374 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5375 a_Cnd { \
5376 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5377 } IEM_MC_ENDIF(); \
5378 IEM_MC_ADVANCE_RIP(); \
5379 IEM_MC_END(); \
5380 return VINF_SUCCESS; \
5381 \
5382 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5383 } \
5384 } do {} while (0)
5385
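/* A rough sketch of what e.g. CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF))
   expands to for the 16-bit register form of cmovo:

       IEM_MC_BEGIN(0, 1);
       IEM_MC_LOCAL(uint16_t, u16Tmp);
       IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
           IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
           IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
       } IEM_MC_ENDIF();
       IEM_MC_ADVANCE_RIP();
       IEM_MC_END();
       return VINF_SUCCESS;

   Only the 32-bit cases carry an IEM_MC_ELSE() arm: in 64-bit mode a
   32-bit destination is zero-extended even when the move is not taken,
   hence the IEM_MC_CLEAR_HIGH_GREG_U64() on the else path. */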
5386
5387
5388/** Opcode 0x0f 0x40. */
5389FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5390{
5391 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5392 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5393}
5394
5395
5396/** Opcode 0x0f 0x41. */
5397FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5398{
5399 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5400 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5401}
5402
5403
5404/** Opcode 0x0f 0x42. */
5405FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5406{
5407 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5408 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5409}
5410
5411
5412/** Opcode 0x0f 0x43. */
5413FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5414{
5415 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5416 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5417}
5418
5419
5420/** Opcode 0x0f 0x44. */
5421FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5422{
5423 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5424 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5425}
5426
5427
5428/** Opcode 0x0f 0x45. */
5429FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5430{
5431 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5432 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5433}
5434
5435
5436/** Opcode 0x0f 0x46. */
5437FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5438{
5439 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5440 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5441}
5442
5443
5444/** Opcode 0x0f 0x47. */
5445FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5446{
5447 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5448 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5449}
5450
5451
5452/** Opcode 0x0f 0x48. */
5453FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5454{
5455 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5456 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5457}
5458
5459
5460/** Opcode 0x0f 0x49. */
5461FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5462{
5463 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5464 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5465}
5466
5467
5468/** Opcode 0x0f 0x4a. */
5469FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5470{
5471 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5472 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5473}
5474
5475
5476/** Opcode 0x0f 0x4b. */
5477FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5478{
5479 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5480 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5481}
5482
5483
5484/** Opcode 0x0f 0x4c. */
5485FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5486{
5487 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5488 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5489}
5490
5491
5492/** Opcode 0x0f 0x4d. */
5493FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5494{
5495 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5496 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5497}
5498
5499
5500/** Opcode 0x0f 0x4e. */
5501FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5502{
5503 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5504 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5505}
5506
5507
5508/** Opcode 0x0f 0x4f. */
5509FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5510{
5511 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5512 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5513}
5514
5515#undef CMOV_X
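
/*
 * Illustrative sketch, guarded out of the build: how the signed "less"
 * condition used by cmovl above falls out of EFLAGS.  The masks mirror
 * X86_EFL_SF (bit 7) and X86_EFL_OF (bit 11); the helper name is made up
 * for illustration and is not part of IEM.
 */
#if 0
static bool RefCmovLessTaken(uint32_t fEfl)
{
    bool const fSf = RT_BOOL(fEfl & X86_EFL_SF);
    bool const fOf = RT_BOOL(fEfl & X86_EFL_OF);
    /* cmovl moves when SF != OF, i.e. the sign of the last arithmetic
       result and its overflow indication disagree. */
    return fSf != fOf;
}
#endif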
5516
5517/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5518FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5519{
5520 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5522 if (IEM_IS_MODRM_REG_MODE(bRm))
5523 {
5524 /*
5525 * Register, register.
5526 */
5527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5528 IEM_MC_BEGIN(2, 1);
5529 IEM_MC_LOCAL(uint8_t, u8Dst);
5530 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5531 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5532 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5533 IEM_MC_PREPARE_SSE_USAGE();
5534 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5535 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5536 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5537 IEM_MC_ADVANCE_RIP();
5538 IEM_MC_END();
5539 return VINF_SUCCESS;
5540 }
5541
5542 /* No memory operand. */
5543 return IEMOP_RAISE_INVALID_OPCODE();
5544}
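

/*
 * Reference sketch of what the iemAImpl_movmskps_u128 worker computes
 * (guarded out of the build, helper name made up): the IEEE sign bit of
 * each of the four packed singles, gathered into the low nibble of the
 * destination.
 */
#if 0
static uint8_t RefMovMskPs(PCRTUINT128U puSrc)
{
    uint8_t bRet = 0;
    for (unsigned i = 0; i < 4; i++)
        bRet |= (uint8_t)((puSrc->au32[i] >> 31) << i); /* bit 31 = sign */
    return bRet;
}
#endif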
5545
5546
5547/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5548FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5549{
5550 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5552 if (IEM_IS_MODRM_REG_MODE(bRm))
5553 {
5554 /*
5555 * Register, register.
5556 */
5557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5558 IEM_MC_BEGIN(2, 1);
5559 IEM_MC_LOCAL(uint8_t, u8Dst);
5560 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5561 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5562 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5563 IEM_MC_PREPARE_SSE_USAGE();
5564 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5565 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5566 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5567 IEM_MC_ADVANCE_RIP();
5568 IEM_MC_END();
5569 return VINF_SUCCESS;
5570 }
5571
5572 /* No memory operand. */
5573 return IEMOP_RAISE_INVALID_OPCODE();
5575}
5576
5577
5578/* Opcode 0xf3 0x0f 0x50 - invalid */
5579/* Opcode 0xf2 0x0f 0x50 - invalid */
5580
5581
5582/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5583FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5584{
5585 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5586 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5587}
5588
5589
5590/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5591FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5592{
5593 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5594 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5595}
5596
5597
5598/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5599FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5600{
5601 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5602 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5603}
5604
5605
5606/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5607FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5608{
5609 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5610 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5611}
5612
5613
5614/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5615FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
5616/* Opcode 0x66 0x0f 0x52 - invalid */
5617/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5618FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
5619/* Opcode 0xf2 0x0f 0x52 - invalid */
5620
5621/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5622FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5623/* Opcode 0x66 0x0f 0x53 - invalid */
5624/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5625FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5626/* Opcode 0xf2 0x0f 0x53 - invalid */
5627
5628
5629/** Opcode 0x0f 0x54 - andps Vps, Wps */
5630FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5631{
5632 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5633 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5634}
5635
5636
5637/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5638FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5639{
5640 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5641 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5642}
5643
5644
5645/* Opcode 0xf3 0x0f 0x54 - invalid */
5646/* Opcode 0xf2 0x0f 0x54 - invalid */
5647
5648
5649/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5650FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5651{
5652 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5653 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5654}
5655
5656
5657/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5658FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5659{
5660 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5661 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5662}
5663
5664
5665/* Opcode 0xf3 0x0f 0x55 - invalid */
5666/* Opcode 0xf2 0x0f 0x55 - invalid */
5667
5668
5669/** Opcode 0x0f 0x56 - orps Vps, Wps */
5670FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5671{
5672 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5673 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5674}
5675
5676
5677/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5678FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5679{
5680 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5681 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5682}
5683
5684
5685/* Opcode 0xf3 0x0f 0x56 - invalid */
5686/* Opcode 0xf2 0x0f 0x56 - invalid */
5687
5688
5689/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5690FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5691{
5692 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5693 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5694}
5695
5696
5697/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5698FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5699{
5700 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5701 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5702}
5703
5704
5705/* Opcode 0xf3 0x0f 0x57 - invalid */
5706/* Opcode 0xf2 0x0f 0x57 - invalid */
5707
5708/** Opcode 0x0f 0x58 - addps Vps, Wps */
5709FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5710{
5711 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5712 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5713}
5714
5715
5716/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5717FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5718{
5719 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5720 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5721}
5722
5723
5724/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5725FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5726{
5727 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5728 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5729}
5730
5731
5732/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5733FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5734{
5735 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5736 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5737}
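

/*
 * Note the worker split above: the packed forms (addps/addpd) go through
 * the *_FullFull_To_Full helpers and combine every lane, while the scalar
 * forms (addss/addsd) only combine the low element and leave the upper
 * lanes of the destination untouched.  Rough reference model, guarded out
 * of the build and not the actual iemAImpl_* worker:
 */
#if 0
static void RefAddSs(float afDst[4], float const afSrc[4])
{
    afDst[0] += afSrc[0];   /* scalar: low lane only... */
    /* afDst[1..3] are left as-is; addps would add all four lanes. */
}
#endif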
5738
5739
5740/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5741FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5742{
5743 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5744 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5745}
5746
5747
5748/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5749FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5750{
5751 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5752 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5753}
5754
5755
5756/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5757FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5758{
5759 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5760 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5761}
5762
5763
5764/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5765FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5766{
5767 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5768 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5769}
5770
5771
5772/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5773FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5774{
5775 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5776 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5777}
5778
5779
5780/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5781FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5782{
5783 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5784 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5785}
5786
5787
5788/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5789FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5790{
5791 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5792 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5793}
5794
5795
5796/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5797FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5798{
5799 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5800 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5801}
5802
5803
5804/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5805FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5806{
5807 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5808 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5809}
5810
5811
5812/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5813FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5814{
5815 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5816 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5817}
5818
5819
5820/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5821FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5822{
5823 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5824 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5825}
5826
5827
5828/* Opcode 0xf2 0x0f 0x5b - invalid */
5829
5830
5831/** Opcode 0x0f 0x5c - subps Vps, Wps */
5832FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5833{
5834 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5835 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5836}
5837
5838
5839/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5840FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5841{
5842 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5843 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5844}
5845
5846
5847/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5848FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5849{
5850 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5851 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5852}
5853
5854
5855/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5856FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5857{
5858 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5859 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5860}
5861
5862
5863/** Opcode 0x0f 0x5d - minps Vps, Wps */
5864FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5865{
5866 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5867 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5868}
5869
5870
5871/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5872FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5873{
5874 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5875 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5876}
5877
5878
5879/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5880FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5881{
5882 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5883 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5884}
5885
5886
5887/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5888FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5889{
5890 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5891 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5892}
5893
5894
5895/** Opcode 0x0f 0x5e - divps Vps, Wps */
5896FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5897{
5898 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5899 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5900}
5901
5902
5903/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5904FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5905{
5906 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5907 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5908}
5909
5910
5911/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5912FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5913{
5914 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5915 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5916}
5917
5918
5919/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5920FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5921{
5922 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5923 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5924}
5925
5926
5927/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5928FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5929{
5930 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5931 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5932}
5933
5934
5935/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5936FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5937{
5938 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5939 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5940}
5941
5942
5943/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5944FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5945{
5946 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5947 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5948}
5949
5950
5951/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5952FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5953{
5954 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5955 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5956}
5957
5958
5959/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5960FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5961{
5962 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5963 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5964}
5965
5966
5967/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5968FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5969{
5970 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5971 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5972}
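

/*
 * Interleave sketch for the punpckl* family (guarded out of the build,
 * helper name made up): the low halves of destination and source are
 * interleaved element by element, destination elements landing in the
 * even positions.
 */
#if 0
static uint64_t RefPunpcklBw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uRet = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uRet |= ((uDst >> (i * 8)) & 0xff) << (i * 16);     /* even bytes from dst */
        uRet |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); /* odd bytes from src */
    }
    return uRet;
}
#endif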
5973
5974
5975/* Opcode 0xf3 0x0f 0x60 - invalid */
5976
5977
5978/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5979FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5980{
5981 /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
5982 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5983 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5984}
5985
5986
5987/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5988FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5989{
5990 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5991 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5992}
5993
5994
5995/* Opcode 0xf3 0x0f 0x61 - invalid */
5996
5997
5998/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5999FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
6000{
6001 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6002 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
6003}
6004
6005
6006/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
6007FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
6008{
6009 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6010 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
6011}
6012
6013
6014/* Opcode 0xf3 0x0f 0x62 - invalid */
6015
6016
6017
6018/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
6019FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
6020{
6021 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6022 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
6023}
6024
6025
6026/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
6027FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
6028{
6029 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6030 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
6031}
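

/*
 * Signed-saturation sketch for the pack* workers used above (guarded out
 * of the build, assumes the <stdint.h> limits): each source word is
 * clamped to the signed byte range before being packed.
 */
#if 0
static int8_t RefSatI16ToI8(int16_t i16)
{
    if (i16 > INT8_MAX)
        return INT8_MAX;
    if (i16 < INT8_MIN)
        return INT8_MIN;
    return (int8_t)i16;
}
#endif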
6032
6033
6034/* Opcode 0xf3 0x0f 0x63 - invalid */
6035
6036
6037/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
6038FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
6039{
6040 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6041 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
6042}
6043
6044
6045/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6046FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6047{
6048 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6049 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6050}
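

/*
 * The pcmpgt* workers above produce per-element all-ones/all-zero masks
 * rather than EFLAGS results; byte-wise reference model (guarded out of
 * the build, helper name made up):
 */
#if 0
static uint8_t RefPcmpGtB(int8_t i8Dst, int8_t i8Src)
{
    return i8Dst > i8Src ? UINT8_C(0xff) : UINT8_C(0x00); /* signed compare */
}
#endif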
6051
6052
6053/* Opcode 0xf3 0x0f 0x64 - invalid */
6054
6055
6056/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6057FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6058{
6059 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6060 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6061}
6062
6063
6064/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6065FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6066{
6067 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6068 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6069}
6070
6071
6072/* Opcode 0xf3 0x0f 0x65 - invalid */
6073
6074
6075/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6076FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6077{
6078 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6079 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6080}
6081
6082
6083/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6084FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6085{
6086 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6087 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6088}
6089
6090
6091/* Opcode 0xf3 0x0f 0x66 - invalid */
6092
6093
6094/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6095FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6096{
6097 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6098 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6099}
6100
6101
6102/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6103FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6104{
6105 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6106 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6107}
6108
6109
6110/* Opcode 0xf3 0x0f 0x67 - invalid */
6111
6112
6113/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6114 * @note Intel and AMD both use Qd for the second parameter, however they
6115 * both list it as an mmX/mem64 operand and Intel describes it as being
6116 * loaded as a qword, so it should be Qq, shouldn't it? */
6117FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6118{
6119 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6120 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6121}
6122
6123
6124/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6125FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6126{
6127 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6128 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6129}
6130
6131
6132/* Opcode 0xf3 0x0f 0x68 - invalid */
6133
6134
6135/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6136 * @note Intel and AMD both use Qd for the second parameter, however they
6137 * both list it as an mmX/mem64 operand and Intel describes it as being
6138 * loaded as a qword, so it should be Qq, shouldn't it? */
6139FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6140{
6141 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6142 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6143}
6144
6145
6146/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6147FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6148{
6149 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6150 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6152}
6153
6154
6155/* Opcode 0xf3 0x0f 0x69 - invalid */
6156
6157
6158/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6159 * @note Intel and AMD both use Qd for the second parameter, however they
6160 * both list it as an mmX/mem64 operand and Intel describes it as being
6161 * loaded as a qword, so it should be Qq, shouldn't it? */
6162FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6163{
6164 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6165 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6166}
6167
6168
6169/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6170FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6171{
6172 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6173 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6174}
6175
6176
6177/* Opcode 0xf3 0x0f 0x6a - invalid */
6178
6179
6180/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6181FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6182{
6183 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6184 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6185}
6186
6187
6188/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6189FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6190{
6191 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6192 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6193}
6194
6195
6196/* Opcode 0xf3 0x0f 0x6b - invalid */
6197
6198
6199/* Opcode 0x0f 0x6c - invalid */
6200
6201
6202/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6203FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6204{
6205 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6206 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6207}
6208
6209
6210/* Opcode 0xf3 0x0f 0x6c - invalid */
6211/* Opcode 0xf2 0x0f 0x6c - invalid */
6212
6213
6214/* Opcode 0x0f 0x6d - invalid */
6215
6216
6217/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6218FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6219{
6220 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6221 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6222}
6223
6224
6225/* Opcode 0xf3 0x0f 0x6d - invalid */
6226
6227
6228FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6229{
6230 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6231 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6232 {
6233 /**
6234 * @opcode 0x6e
6235 * @opcodesub rex.w=1
6236 * @oppfx none
6237 * @opcpuid mmx
6238 * @opgroup og_mmx_datamove
6239 * @opxcpttype 5
6240 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6241 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6242 */
6243 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6244 if (IEM_IS_MODRM_REG_MODE(bRm))
6245 {
6246 /* MMX, greg64 */
6247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6248 IEM_MC_BEGIN(0, 1);
6249 IEM_MC_LOCAL(uint64_t, u64Tmp);
6250
6251 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6252 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6253 IEM_MC_FPU_TO_MMX_MODE();
6254
6255 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6256 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6257
6258 IEM_MC_ADVANCE_RIP();
6259 IEM_MC_END();
6260 }
6261 else
6262 {
6263 /* MMX, [mem64] */
6264 IEM_MC_BEGIN(0, 2);
6265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6266 IEM_MC_LOCAL(uint64_t, u64Tmp);
6267
6268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6270 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6271 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6272 IEM_MC_FPU_TO_MMX_MODE();
6273
6274 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6275 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6276
6277 IEM_MC_ADVANCE_RIP();
6278 IEM_MC_END();
6279 }
6280 }
6281 else
6282 {
6283 /**
6284 * @opdone
6285 * @opcode 0x6e
6286 * @opcodesub rex.w=0
6287 * @oppfx none
6288 * @opcpuid mmx
6289 * @opgroup og_mmx_datamove
6290 * @opxcpttype 5
6291 * @opfunction iemOp_movd_q_Pd_Ey
6292 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6293 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6294 */
6295 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6296 if (IEM_IS_MODRM_REG_MODE(bRm))
6297 {
6298 /* MMX, greg */
6299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6300 IEM_MC_BEGIN(0, 1);
6301 IEM_MC_LOCAL(uint64_t, u64Tmp);
6302
6303 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6304 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6305 IEM_MC_FPU_TO_MMX_MODE();
6306
6307 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6308 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6309
6310 IEM_MC_ADVANCE_RIP();
6311 IEM_MC_END();
6312 }
6313 else
6314 {
6315 /* MMX, [mem] */
6316 IEM_MC_BEGIN(0, 2);
6317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6318 IEM_MC_LOCAL(uint32_t, u32Tmp);
6319
6320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6322 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6323 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6324 IEM_MC_FPU_TO_MMX_MODE();
6325
6326 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6327 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6328
6329 IEM_MC_ADVANCE_RIP();
6330 IEM_MC_END();
6331 }
6332 }
6333 return VINF_SUCCESS;
6334}
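

/*
 * Both movd paths above zero-extend into the full 64-bit MMX register
 * (the PdZx operand form): a 32-bit source clears bits 63:32.  Trivial
 * reference model, guarded out of the build:
 */
#if 0
static uint64_t RefMovdZx(uint32_t u32Src)
{
    return (uint64_t)u32Src; /* bits 63:32 implicitly zero */
}
#endif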
6335
6336FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6337{
6338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6339 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6340 {
6341 /**
6342 * @opcode 0x6e
6343 * @opcodesub rex.w=1
6344 * @oppfx 0x66
6345 * @opcpuid sse2
6346 * @opgroup og_sse2_simdint_datamove
6347 * @opxcpttype 5
6348 * @optest 64-bit / op1=1 op2=2 -> op1=2
6349 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6350 */
6351 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6352 if (IEM_IS_MODRM_REG_MODE(bRm))
6353 {
6354 /* XMM, greg64 */
6355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6356 IEM_MC_BEGIN(0, 1);
6357 IEM_MC_LOCAL(uint64_t, u64Tmp);
6358
6359 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6360 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6361
6362 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6363 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6364
6365 IEM_MC_ADVANCE_RIP();
6366 IEM_MC_END();
6367 }
6368 else
6369 {
6370 /* XMM, [mem64] */
6371 IEM_MC_BEGIN(0, 2);
6372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6373 IEM_MC_LOCAL(uint64_t, u64Tmp);
6374
6375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6377 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6378 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6379
6380 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6381 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6382
6383 IEM_MC_ADVANCE_RIP();
6384 IEM_MC_END();
6385 }
6386 }
6387 else
6388 {
6389 /**
6390 * @opdone
6391 * @opcode 0x6e
6392 * @opcodesub rex.w=0
6393 * @oppfx 0x66
6394 * @opcpuid sse2
6395 * @opgroup og_sse2_simdint_datamove
6396 * @opxcpttype 5
6397 * @opfunction iemOp_movd_q_Vy_Ey
6398 * @optest op1=1 op2=2 -> op1=2
6399 * @optest op1=0 op2=-42 -> op1=-42
6400 */
6401 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6402 if (IEM_IS_MODRM_REG_MODE(bRm))
6403 {
6404 /* XMM, greg32 */
6405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6406 IEM_MC_BEGIN(0, 1);
6407 IEM_MC_LOCAL(uint32_t, u32Tmp);
6408
6409 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6410 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6411
6412 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6413 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6414
6415 IEM_MC_ADVANCE_RIP();
6416 IEM_MC_END();
6417 }
6418 else
6419 {
6420 /* XMM, [mem32] */
6421 IEM_MC_BEGIN(0, 2);
6422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6423 IEM_MC_LOCAL(uint32_t, u32Tmp);
6424
6425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6427 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6428 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6429
6430 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6431 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6432
6433 IEM_MC_ADVANCE_RIP();
6434 IEM_MC_END();
6435 }
6436 }
6437 return VINF_SUCCESS;
6438}
6439
6440/* Opcode 0xf3 0x0f 0x6e - invalid */
6441
6442
6443/**
6444 * @opcode 0x6f
6445 * @oppfx none
6446 * @opcpuid mmx
6447 * @opgroup og_mmx_datamove
6448 * @opxcpttype 5
6449 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6450 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6451 */
6452FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6453{
6454 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
6455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6456 if (IEM_IS_MODRM_REG_MODE(bRm))
6457 {
6458 /*
6459 * Register, register.
6460 */
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 IEM_MC_BEGIN(0, 1);
6463 IEM_MC_LOCAL(uint64_t, u64Tmp);
6464
6465 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6466 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6467 IEM_MC_FPU_TO_MMX_MODE();
6468
6469 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6470 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6471
6472 IEM_MC_ADVANCE_RIP();
6473 IEM_MC_END();
6474 }
6475 else
6476 {
6477 /*
6478 * Register, memory.
6479 */
6480 IEM_MC_BEGIN(0, 2);
6481 IEM_MC_LOCAL(uint64_t, u64Tmp);
6482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6483
6484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6486 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6487 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6488 IEM_MC_FPU_TO_MMX_MODE();
6489
6490 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6491 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6492
6493 IEM_MC_ADVANCE_RIP();
6494 IEM_MC_END();
6495 }
6496 return VINF_SUCCESS;
6497}
6498
6499/**
6500 * @opcode 0x6f
6501 * @oppfx 0x66
6502 * @opcpuid sse2
6503 * @opgroup og_sse2_simdint_datamove
6504 * @opxcpttype 1
6505 * @optest op1=1 op2=2 -> op1=2
6506 * @optest op1=0 op2=-42 -> op1=-42
6507 */
6508FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6509{
6510 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6512 if (IEM_IS_MODRM_REG_MODE(bRm))
6513 {
6514 /*
6515 * Register, register.
6516 */
6517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6518 IEM_MC_BEGIN(0, 0);
6519
6520 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6522
6523 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6524 IEM_GET_MODRM_RM(pVCpu, bRm));
6525 IEM_MC_ADVANCE_RIP();
6526 IEM_MC_END();
6527 }
6528 else
6529 {
6530 /*
6531 * Register, memory.
6532 */
6533 IEM_MC_BEGIN(0, 2);
6534 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6536
6537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6539 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6540 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6541
6542 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6543 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6544
6545 IEM_MC_ADVANCE_RIP();
6546 IEM_MC_END();
6547 }
6548 return VINF_SUCCESS;
6549}
6550
6551/**
6552 * @opcode 0x6f
6553 * @oppfx 0xf3
6554 * @opcpuid sse2
6555 * @opgroup og_sse2_simdint_datamove
6556 * @opxcpttype 4UA
6557 * @optest op1=1 op2=2 -> op1=2
6558 * @optest op1=0 op2=-42 -> op1=-42
6559 */
6560FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6561{
6562 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6564 if (IEM_IS_MODRM_REG_MODE(bRm))
6565 {
6566 /*
6567 * Register, register.
6568 */
6569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6570 IEM_MC_BEGIN(0, 0);
6571 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6572 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6573 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6574 IEM_GET_MODRM_RM(pVCpu, bRm));
6575 IEM_MC_ADVANCE_RIP();
6576 IEM_MC_END();
6577 }
6578 else
6579 {
6580 /*
6581 * Register, memory.
6582 */
6583 IEM_MC_BEGIN(0, 2);
6584 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6586
6587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6589 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6590 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6591 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6592 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6593
6594 IEM_MC_ADVANCE_RIP();
6595 IEM_MC_END();
6596 }
6597 return VINF_SUCCESS;
6598}
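

/*
 * The only functional difference between the movdqa and movdqu memory
 * paths above is the fetch: IEM_MC_FETCH_MEM_U128_ALIGN_SSE raises \#GP
 * on a misaligned effective address, IEM_MC_FETCH_MEM_U128 does not.
 * Alignment check sketch, guarded out of the build:
 */
#if 0
static bool RefIsMovdqaAligned(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* movdqa requires 16-byte alignment */
}
#endif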
6599
6600
6601/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6602FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6603{
6604 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6606 if (IEM_IS_MODRM_REG_MODE(bRm))
6607 {
6608 /*
6609 * Register, register.
6610 */
6611 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6613
6614 IEM_MC_BEGIN(3, 0);
6615 IEM_MC_ARG(uint64_t *, pDst, 0);
6616 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6617 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6618 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6619 IEM_MC_PREPARE_FPU_USAGE();
6620 IEM_MC_FPU_TO_MMX_MODE();
6621
6622 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6623 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6624 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6625 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6626
6627 IEM_MC_ADVANCE_RIP();
6628 IEM_MC_END();
6629 }
6630 else
6631 {
6632 /*
6633 * Register, memory.
6634 */
6635 IEM_MC_BEGIN(3, 2);
6636 IEM_MC_ARG(uint64_t *, pDst, 0);
6637 IEM_MC_LOCAL(uint64_t, uSrc);
6638 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6640
6641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6642 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6643 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6645 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6646 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6647
6648 IEM_MC_PREPARE_FPU_USAGE();
6649 IEM_MC_FPU_TO_MMX_MODE();
6650
6651 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6652 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6653 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6654
6655 IEM_MC_ADVANCE_RIP();
6656 IEM_MC_END();
6657 }
6658 return VINF_SUCCESS;
6659}
6660
6661
6662/**
6663 * Common worker for SSE2 instructions of the form:
6664 * pshufd xmm1, xmm2/mem128, imm8
6665 * pshufhw xmm1, xmm2/mem128, imm8
6666 * pshuflw xmm1, xmm2/mem128, imm8
6667 *
6668 * Proper alignment of the 128-bit operand is enforced.
6669 * Exceptions type 4. SSE2 cpuid checks.
6670 */
6671FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6672{
6673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6674 if (IEM_IS_MODRM_REG_MODE(bRm))
6675 {
6676 /*
6677 * Register, register.
6678 */
6679 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6681
6682 IEM_MC_BEGIN(3, 0);
6683 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6684 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6685 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6686 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6687 IEM_MC_PREPARE_SSE_USAGE();
6688 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6689 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6690 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6691 IEM_MC_ADVANCE_RIP();
6692 IEM_MC_END();
6693 }
6694 else
6695 {
6696 /*
6697 * Register, memory.
6698 */
6699 IEM_MC_BEGIN(3, 2);
6700 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6701 IEM_MC_LOCAL(RTUINT128U, uSrc);
6702 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6704
6705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6706 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6707 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6709 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6710
6711 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6712 IEM_MC_PREPARE_SSE_USAGE();
6713 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6714 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6715
6716 IEM_MC_ADVANCE_RIP();
6717 IEM_MC_END();
6718 }
6719 return VINF_SUCCESS;
6720}
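

/*
 * The immediate byte fetched above ("bEvil") is four 2-bit element
 * selectors.  Reference model of pshufd, guarded out of the build and
 * not the actual iemAImpl_pshufd_u128 worker:
 */
#if 0
static void RefPshufD(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        au32Dst[i] = au32Src[(bImm >> (i * 2)) & 3]; /* 2 bits pick a source dword */
}
#endif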
6721
6722
6723/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6724FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6725{
6726 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6727 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6728}
6729
6730
6731/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6732FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6733{
6734 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6735 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6736}
6737
6738
6739/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6740FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6741{
6742 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6743 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6744}
6745
6746
6747/**
6748 * Common worker for MMX instructions of the form:
6749 * psrlw mm, imm8
6750 * psraw mm, imm8
6751 * psllw mm, imm8
6752 * psrld mm, imm8
6753 * psrad mm, imm8
6754 * pslld mm, imm8
6755 * psrlq mm, imm8
6756 * psllq mm, imm8
6757 *
6758 */
6759FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6760{
6761 if (IEM_IS_MODRM_REG_MODE(bRm))
6762 {
6763 /*
6764 * Register, immediate.
6765 */
6766 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6768
6769 IEM_MC_BEGIN(2, 0);
6770 IEM_MC_ARG(uint64_t *, pDst, 0);
6771 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6772 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6773 IEM_MC_PREPARE_FPU_USAGE();
6774 IEM_MC_FPU_TO_MMX_MODE();
6775
6776 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6777 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6778 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6779
6780 IEM_MC_ADVANCE_RIP();
6781 IEM_MC_END();
6782 }
6783 else
6784 {
6785 /*
6786 * Register, memory not supported.
6787 */
6788 /// @todo Caller already enforced register mode?!
6789 }
6790 return VINF_SUCCESS;
6791}
6792
6793
6794/**
6795 * Common worker for SSE2 instructions of the form:
6796 * psrlw xmm, imm8
6797 * psraw xmm, imm8
6798 * psllw xmm, imm8
6799 * psrld xmm, imm8
6800 * psrad xmm, imm8
6801 * pslld xmm, imm8
6802 * psrlq xmm, imm8
6803 * psllq xmm, imm8
6804 *
6805 */
6806FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6807{
6808 if (IEM_IS_MODRM_REG_MODE(bRm))
6809 {
6810 /*
6811 * Register, immediate.
6812 */
6813 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6815
6816 IEM_MC_BEGIN(2, 0);
6817 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6818 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6819 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6820 IEM_MC_PREPARE_SSE_USAGE();
6821 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6822 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6823 IEM_MC_ADVANCE_RIP();
6824 IEM_MC_END();
6825 }
6826 else
6827 {
6828 /*
6829 * Register, memory not supported.
6830 */
6831 /// @todo Caller already enforced register mode?!
6832 }
6833 return VINF_SUCCESS;
6834}
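

/*
 * Element-wise shift sketch for the psrlw-style workers dispatched via
 * the two helpers above (guarded out of the build): logical shift counts
 * of the element width or more zero the element rather than being masked.
 */
#if 0
static uint16_t RefPsrlW(uint16_t u16, uint8_t cShift)
{
    return cShift < 16 ? (uint16_t)(u16 >> cShift) : 0;
}
#endif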
6835
6836
6837/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6838FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6839{
6840// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6841 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6842}
6843
6844
6845/** Opcode 0x66 0x0f 0x71 11/2. */
6846FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6847{
6848// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6849 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6850}
6851
6852
6853/** Opcode 0x0f 0x71 11/4. */
6854FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6855{
6856// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6857 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6858}
6859
6860
6861/** Opcode 0x66 0x0f 0x71 11/4. */
6862FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6863{
6864// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6865 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6866}
6867
6868
6869/** Opcode 0x0f 0x71 11/6. */
6870FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6871{
6872// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6873 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6874}
6875
6876
6877/** Opcode 0x66 0x0f 0x71 11/6. */
6878FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6879{
6880// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6881 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6882}
6883
6884
6885/**
6886 * Group 12 jump table for register variant.
6887 */
6888IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6889{
6890 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6891 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6892 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6893 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6894 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6895 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6896 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6897 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6898};
6899AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6900
6901
6902/** Opcode 0x0f 0x71. */
6903FNIEMOP_DEF(iemOp_Grp12)
6904{
6905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6906 if (IEM_IS_MODRM_REG_MODE(bRm))
6907 /* register, register */
6908 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6909 + pVCpu->iem.s.idxPrefix], bRm);
6910 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6911}
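

/*
 * The table lookup above flattens (ModR/M.reg, mandatory prefix) into a
 * single index: each /r value owns a row of four entries, one per prefix
 * column (none, 0x66, 0xf3, 0xf2) as tracked by idxPrefix.  Indexing
 * sketch, guarded out of the build:
 */
#if 0
static unsigned RefGrp12TableIndex(uint8_t bModRmReg, uint8_t idxPrefix)
{
    return bModRmReg * 4 + idxPrefix; /* rows of four in g_apfnGroup12RegReg */
}
#endif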
6912
6913
6914/** Opcode 0x0f 0x72 11/2. */
6915FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6916{
6917// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6918 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6919}
6920
6921
6922/** Opcode 0x66 0x0f 0x72 11/2. */
6923FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6924{
6925// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6926 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6927}
6928
6929
6930/** Opcode 0x0f 0x72 11/4. */
6931FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6932{
6933// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6934 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6935}
6936
6937
6938/** Opcode 0x66 0x0f 0x72 11/4. */
6939FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6940{
6941// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6942 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6943}
6944
6945
6946/** Opcode 0x0f 0x72 11/6. */
6947FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6948{
6949// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6950 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6951}
6952
6953/** Opcode 0x66 0x0f 0x72 11/6. */
6954FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6955{
6956// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6957 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6958}
6959
6960
6961/**
6962 * Group 13 jump table for register variant.
6963 */
6964IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6965{
6966 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6967 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6968 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6969 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6970 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6971 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6972 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6973 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6974};
6975AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6976
6977/** Opcode 0x0f 0x72. */
6978FNIEMOP_DEF(iemOp_Grp13)
6979{
6980 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6981 if (IEM_IS_MODRM_REG_MODE(bRm))
6982 /* register, register */
6983 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6984 + pVCpu->iem.s.idxPrefix], bRm);
6985 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6986}
6987
6988
6989/** Opcode 0x0f 0x73 11/2. */
6990FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6991{
6992// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6993 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6994}
6995
6996
6997/** Opcode 0x66 0x0f 0x73 11/2. */
6998FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6999{
7000// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
7001 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
7002}
7003
7004
7005/** Opcode 0x66 0x0f 0x73 11/3. */
7006FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
7007{
7008// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
7009 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
7010}
7011
7012
7013/** Opcode 0x0f 0x73 11/6. */
7014FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
7015{
7016// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
7017 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
7018}
7019
7020
7021/** Opcode 0x66 0x0f 0x73 11/6. */
7022FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
7023{
7024// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
7025 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
7026}
7027
7028
7029/** Opcode 0x66 0x0f 0x73 11/7. */
7030FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
7031{
7032// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
7033 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
7034}
7035
7036/**
7037 * Group 14 jump table for register variant.
7038 */
7039IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
7040{
7041 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7042 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7043 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7044 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7045 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7046 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7047 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7048 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7049};
7050AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
7051
7052
7053/** Opcode 0x0f 0x73. */
7054FNIEMOP_DEF(iemOp_Grp14)
7055{
7056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7057 if (IEM_IS_MODRM_REG_MODE(bRm))
7058 /* register, register */
7059 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7060 + pVCpu->iem.s.idxPrefix], bRm);
7061 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7062}
7063
7064
7065/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7066FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7067{
7068 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7069 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7070}
7071
7072
7073/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7074FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7075{
7076 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7077 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7078}
7079
7080
7081/* Opcode 0xf3 0x0f 0x74 - invalid */
7082/* Opcode 0xf2 0x0f 0x74 - invalid */
7083
7084
7085/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7086FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7087{
7088 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7089 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7090}
7091
7092
7093/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7094FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7095{
7096 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7097 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7098}
7099
7100
7101/* Opcode 0xf3 0x0f 0x75 - invalid */
7102/* Opcode 0xf2 0x0f 0x75 - invalid */
7103
7104
7105/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7106FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7107{
7108 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7109 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7110}
7111
7112
7113/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7114FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7115{
7116 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7117 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7118}
7119
7120
7121/* Opcode 0xf3 0x0f 0x76 - invalid */
7122/* Opcode 0xf2 0x0f 0x76 - invalid */
7123
7124
7125/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7126FNIEMOP_DEF(iemOp_emms)
7127{
7128 IEMOP_MNEMONIC(emms, "emms");
7129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7130
7131 IEM_MC_BEGIN(0, 0);
7132 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7133 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7134 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7135 IEM_MC_FPU_FROM_MMX_MODE();
7136 IEM_MC_ADVANCE_RIP();
7137 IEM_MC_END();
7138 return VINF_SUCCESS;
7139}
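

/*
 * emms leaves MMX mode by tagging all eight FPU registers empty again;
 * in x87 terms that is setting every 2-bit tag in the full tag word to
 * 11b.  Reference value, guarded out of the build:
 */
#if 0
static uint16_t RefEmmsTagWord(void)
{
    return UINT16_C(0xffff); /* all eight stack registers tagged empty */
}
#endif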
7140
7141/* Opcode 0x66 0x0f 0x77 - invalid */
7142/* Opcode 0xf3 0x0f 0x77 - invalid */
7143/* Opcode 0xf2 0x0f 0x77 - invalid */
7144
7145/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7146#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7147FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7148{
7149 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7150 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7151 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7152 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif

/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
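    /* Like VMREAD, VMWRITE ignores the operand-size prefix: 64-bit operands in
       long mode, 32-bit operands otherwise. */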

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */


/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}


/* Opcode 0xf3 0x0f 0x7c - invalid */


/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}


/* Opcode 0x0f 0x7d - invalid */


/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}


/* Opcode 0xf3 0x0f 0x7d - invalid */


/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}


/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
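    /* REX.W selects between the 64-bit MOVQ form and the 32-bit MOVD form. */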
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x7e
         * @opcodesub rex.w=1
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x7e
         * @opcodesub rex.w=0
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Ey_Pd
         * @optest op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


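/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */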
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x7e
         * @opcodesub rex.w=1
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x7e
         * @opcodesub rex.w=0
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Ey_Vy
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x7e
 * @oppfx 0xf3
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype none
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
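    /* The F3-prefixed MOVQ loads 64 bits and zero-extends them into the full
       128-bit destination register (hence the VqZx_WO operand). */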
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf2 0x0f 0x7e - invalid */


/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
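        /* movdqa requires a 16-byte aligned address; the aligned store raises
           #GP(0) on a misaligned access. */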
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
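        /* Unlike movdqa, movdqu tolerates any alignment, so a plain
           (unaligned) store is used here. */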
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf2 0x0f 0x7f - invalid */


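/*
 * The Jcc Jv instructions (opcodes 0x0f 0x80 thru 0x8f) are near conditional
 * jumps with a 16- or 32-bit relative displacement.  In 64-bit mode the
 * operand size defaults to 64 bits and the displacement is always 32 bits.
 */
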
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


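/*
 * The SETcc Eb instructions (opcodes 0x0f 0x90 thru 0x9f) store 1 in the byte
 * destination when the condition holds and 0 otherwise.
 */
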
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common 'push segment-register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
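            /* A 32-bit push of a segment register may only write the low 16
               bits of the stack slot on some CPUs; the dedicated SREG push
               reproduces that quirk rather than doing a full 32-bit store. */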
9040 IEM_MC_PUSH_U32_SREG(u32Value);
9041 IEM_MC_ADVANCE_RIP();
9042 IEM_MC_END();
9043 break;
9044
9045 case IEMMODE_64BIT:
9046 IEM_MC_BEGIN(0, 1);
9047 IEM_MC_LOCAL(uint64_t, u64Value);
9048 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
9049 IEM_MC_PUSH_U64(u64Value);
9050 IEM_MC_ADVANCE_RIP();
9051 IEM_MC_END();
9052 break;
9053 }
9054
9055 return VINF_SUCCESS;
9056}
9057
9058
9059/** Opcode 0x0f 0xa0. */
9060FNIEMOP_DEF(iemOp_push_fs)
9061{
9062 IEMOP_MNEMONIC(push_fs, "push fs");
9063 IEMOP_HLP_MIN_386();
9064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9065 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
9066}
9067
9068
9069/** Opcode 0x0f 0xa1. */
9070FNIEMOP_DEF(iemOp_pop_fs)
9071{
9072 IEMOP_MNEMONIC(pop_fs, "pop fs");
9073 IEMOP_HLP_MIN_386();
9074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9075 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
9076}
9077
9078
9079/** Opcode 0x0f 0xa2. */
9080FNIEMOP_DEF(iemOp_cpuid)
9081{
9082 IEMOP_MNEMONIC(cpuid, "cpuid");
9083 IEMOP_HLP_MIN_486(); /* not all 486es. */
9084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9085 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
9086}
9087
9088
9089/**
9090 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
9091 * iemOp_bts_Ev_Gv.
9092 */
9093FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
9094{
9095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9096 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9097
9098 if (IEM_IS_MODRM_REG_MODE(bRm))
9099 {
9100 /* register destination. */
9101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9102 switch (pVCpu->iem.s.enmEffOpSize)
9103 {
9104 case IEMMODE_16BIT:
9105 IEM_MC_BEGIN(3, 0);
9106 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9107 IEM_MC_ARG(uint16_t, u16Src, 1);
9108 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9109
9110 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9111 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
9112 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9113 IEM_MC_REF_EFLAGS(pEFlags);
9114 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9115
9116 IEM_MC_ADVANCE_RIP();
9117 IEM_MC_END();
9118 return VINF_SUCCESS;
9119
9120 case IEMMODE_32BIT:
9121 IEM_MC_BEGIN(3, 0);
9122 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9123 IEM_MC_ARG(uint32_t, u32Src, 1);
9124 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9125
9126 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9127 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
9128 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9129 IEM_MC_REF_EFLAGS(pEFlags);
9130 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9131
9132 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9133 IEM_MC_ADVANCE_RIP();
9134 IEM_MC_END();
9135 return VINF_SUCCESS;
9136
9137 case IEMMODE_64BIT:
9138 IEM_MC_BEGIN(3, 0);
9139 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9140 IEM_MC_ARG(uint64_t, u64Src, 1);
9141 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9142
9143 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9144 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
9145 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9146 IEM_MC_REF_EFLAGS(pEFlags);
9147 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9148
9149 IEM_MC_ADVANCE_RIP();
9150 IEM_MC_END();
9151 return VINF_SUCCESS;
9152
9153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9154 }
9155 }
9156 else
9157 {
9158 /* memory destination. */
9159
9160 uint32_t fAccess;
9161 if (pImpl->pfnLockedU16)
9162 fAccess = IEM_ACCESS_DATA_RW;
9163 else /* BT */
9164 fAccess = IEM_ACCESS_DATA_R;
9165
9166 /** @todo test negative bit offsets! */
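/* For a memory destination the bit offset in Gv is a signed quantity that
 * may address memory outside the operand itself, so each case below splits
 * it into a byte displacement and an in-unit bit number. E.g. a 16-bit BT
 * with bit offset 0x1234 accesses the word at EA + (0x1234 >> 4) * 2 and
 * tests bit 0x1234 & 0xf = 4; an offset of -1 accesses the word just below
 * EA and tests its bit 15. */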
9167 switch (pVCpu->iem.s.enmEffOpSize)
9168 {
9169 case IEMMODE_16BIT:
9170 IEM_MC_BEGIN(3, 2);
9171 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9172 IEM_MC_ARG(uint16_t, u16Src, 1);
9173 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9175 IEM_MC_LOCAL(int16_t, i16AddrAdj);
9176
9177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9178 if (pImpl->pfnLockedU16)
9179 IEMOP_HLP_DONE_DECODING();
9180 else
9181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9182 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9183 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
9184 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
9185 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
9186 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
9187 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
9188 IEM_MC_FETCH_EFLAGS(EFlags);
9189
9190 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9191 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9192 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9193 else
9194 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9195 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9196
9197 IEM_MC_COMMIT_EFLAGS(EFlags);
9198 IEM_MC_ADVANCE_RIP();
9199 IEM_MC_END();
9200 return VINF_SUCCESS;
9201
9202 case IEMMODE_32BIT:
9203 IEM_MC_BEGIN(3, 2);
9204 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9205 IEM_MC_ARG(uint32_t, u32Src, 1);
9206 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9208 IEM_MC_LOCAL(int32_t, i32AddrAdj);
9209
9210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9211 if (pImpl->pfnLockedU16)
9212 IEMOP_HLP_DONE_DECODING();
9213 else
9214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9215 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9216 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
9217 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
9218 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
9219 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
9220 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
9221 IEM_MC_FETCH_EFLAGS(EFlags);
9222
9223 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9224 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9225 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9226 else
9227 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9228 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9229
9230 IEM_MC_COMMIT_EFLAGS(EFlags);
9231 IEM_MC_ADVANCE_RIP();
9232 IEM_MC_END();
9233 return VINF_SUCCESS;
9234
9235 case IEMMODE_64BIT:
9236 IEM_MC_BEGIN(3, 2);
9237 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9238 IEM_MC_ARG(uint64_t, u64Src, 1);
9239 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9241 IEM_MC_LOCAL(int64_t, i64AddrAdj);
9242
9243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9244 if (pImpl->pfnLockedU16)
9245 IEMOP_HLP_DONE_DECODING();
9246 else
9247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9248 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9249 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
9250 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
9251 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
9252 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
9253 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
9254 IEM_MC_FETCH_EFLAGS(EFlags);
9255
9256 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9257 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9258 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9259 else
9260 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9261 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9262
9263 IEM_MC_COMMIT_EFLAGS(EFlags);
9264 IEM_MC_ADVANCE_RIP();
9265 IEM_MC_END();
9266 return VINF_SUCCESS;
9267
9268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9269 }
9270 }
9271}
9272
9273
9274/** Opcode 0x0f 0xa3. */
9275FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9276{
9277 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9278 IEMOP_HLP_MIN_386();
9279 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
9280}
9281
9282
9283/**
9284 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9285 */
9286FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9287{
9288 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9289 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9290
9291 if (IEM_IS_MODRM_REG_MODE(bRm))
9292 {
9293 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9295
9296 switch (pVCpu->iem.s.enmEffOpSize)
9297 {
9298 case IEMMODE_16BIT:
9299 IEM_MC_BEGIN(4, 0);
9300 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9301 IEM_MC_ARG(uint16_t, u16Src, 1);
9302 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9303 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9304
9305 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9306 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9307 IEM_MC_REF_EFLAGS(pEFlags);
9308 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9309
9310 IEM_MC_ADVANCE_RIP();
9311 IEM_MC_END();
9312 return VINF_SUCCESS;
9313
9314 case IEMMODE_32BIT:
9315 IEM_MC_BEGIN(4, 0);
9316 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9317 IEM_MC_ARG(uint32_t, u32Src, 1);
9318 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9319 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9320
9321 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9322 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9323 IEM_MC_REF_EFLAGS(pEFlags);
9324 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9325
9326 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9327 IEM_MC_ADVANCE_RIP();
9328 IEM_MC_END();
9329 return VINF_SUCCESS;
9330
9331 case IEMMODE_64BIT:
9332 IEM_MC_BEGIN(4, 0);
9333 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9334 IEM_MC_ARG(uint64_t, u64Src, 1);
9335 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9336 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9337
9338 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9339 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9340 IEM_MC_REF_EFLAGS(pEFlags);
9341 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9342
9343 IEM_MC_ADVANCE_RIP();
9344 IEM_MC_END();
9345 return VINF_SUCCESS;
9346
9347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9348 }
9349 }
9350 else
9351 {
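/* Note: cbImm=1 is passed to the effective address calculation below
 * because the imm8 shift count still follows the ModR/M encoding; in
 * 64-bit mode RIP-relative displacements are relative to the end of the
 * instruction, so the immediate byte still to be fetched must be
 * accounted for. */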
9352 switch (pVCpu->iem.s.enmEffOpSize)
9353 {
9354 case IEMMODE_16BIT:
9355 IEM_MC_BEGIN(4, 2);
9356 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9357 IEM_MC_ARG(uint16_t, u16Src, 1);
9358 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9359 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9361
9362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9363 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9364 IEM_MC_ASSIGN(cShiftArg, cShift);
9365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9366 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9367 IEM_MC_FETCH_EFLAGS(EFlags);
9368 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9369 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9370
9371 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9372 IEM_MC_COMMIT_EFLAGS(EFlags);
9373 IEM_MC_ADVANCE_RIP();
9374 IEM_MC_END();
9375 return VINF_SUCCESS;
9376
9377 case IEMMODE_32BIT:
9378 IEM_MC_BEGIN(4, 2);
9379 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9380 IEM_MC_ARG(uint32_t, u32Src, 1);
9381 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9382 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9384
9385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9386 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9387 IEM_MC_ASSIGN(cShiftArg, cShift);
9388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9389 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9390 IEM_MC_FETCH_EFLAGS(EFlags);
9391 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9392 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9393
9394 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9395 IEM_MC_COMMIT_EFLAGS(EFlags);
9396 IEM_MC_ADVANCE_RIP();
9397 IEM_MC_END();
9398 return VINF_SUCCESS;
9399
9400 case IEMMODE_64BIT:
9401 IEM_MC_BEGIN(4, 2);
9402 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9403 IEM_MC_ARG(uint64_t, u64Src, 1);
9404 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9405 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9407
9408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9409 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9410 IEM_MC_ASSIGN(cShiftArg, cShift);
9411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9412 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9413 IEM_MC_FETCH_EFLAGS(EFlags);
9414 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9415 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9416
9417 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9418 IEM_MC_COMMIT_EFLAGS(EFlags);
9419 IEM_MC_ADVANCE_RIP();
9420 IEM_MC_END();
9421 return VINF_SUCCESS;
9422
9423 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9424 }
9425 }
9426}
9427
9428
9429/**
9430 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9431 */
9432FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9433{
9434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9435 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9436
9437 if (IEM_IS_MODRM_REG_MODE(bRm))
9438 {
9439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9440
9441 switch (pVCpu->iem.s.enmEffOpSize)
9442 {
9443 case IEMMODE_16BIT:
9444 IEM_MC_BEGIN(4, 0);
9445 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9446 IEM_MC_ARG(uint16_t, u16Src, 1);
9447 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9448 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9449
9450 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9451 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9452 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9453 IEM_MC_REF_EFLAGS(pEFlags);
9454 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9455
9456 IEM_MC_ADVANCE_RIP();
9457 IEM_MC_END();
9458 return VINF_SUCCESS;
9459
9460 case IEMMODE_32BIT:
9461 IEM_MC_BEGIN(4, 0);
9462 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9463 IEM_MC_ARG(uint32_t, u32Src, 1);
9464 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9465 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9466
9467 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9468 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9469 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9470 IEM_MC_REF_EFLAGS(pEFlags);
9471 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9472
9473 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9474 IEM_MC_ADVANCE_RIP();
9475 IEM_MC_END();
9476 return VINF_SUCCESS;
9477
9478 case IEMMODE_64BIT:
9479 IEM_MC_BEGIN(4, 0);
9480 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9481 IEM_MC_ARG(uint64_t, u64Src, 1);
9482 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9483 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9484
9485 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9486 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9487 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9488 IEM_MC_REF_EFLAGS(pEFlags);
9489 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9490
9491 IEM_MC_ADVANCE_RIP();
9492 IEM_MC_END();
9493 return VINF_SUCCESS;
9494
9495 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9496 }
9497 }
9498 else
9499 {
9500 switch (pVCpu->iem.s.enmEffOpSize)
9501 {
9502 case IEMMODE_16BIT:
9503 IEM_MC_BEGIN(4, 2);
9504 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9505 IEM_MC_ARG(uint16_t, u16Src, 1);
9506 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9507 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9509
9510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9512 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9513 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9514 IEM_MC_FETCH_EFLAGS(EFlags);
9515 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9516 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9517
9518 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9519 IEM_MC_COMMIT_EFLAGS(EFlags);
9520 IEM_MC_ADVANCE_RIP();
9521 IEM_MC_END();
9522 return VINF_SUCCESS;
9523
9524 case IEMMODE_32BIT:
9525 IEM_MC_BEGIN(4, 2);
9526 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9527 IEM_MC_ARG(uint32_t, u32Src, 1);
9528 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9529 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9531
9532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9534 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9535 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9536 IEM_MC_FETCH_EFLAGS(EFlags);
9537 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9538 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9539
9540 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9541 IEM_MC_COMMIT_EFLAGS(EFlags);
9542 IEM_MC_ADVANCE_RIP();
9543 IEM_MC_END();
9544 return VINF_SUCCESS;
9545
9546 case IEMMODE_64BIT:
9547 IEM_MC_BEGIN(4, 2);
9548 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9549 IEM_MC_ARG(uint64_t, u64Src, 1);
9550 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9551 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9553
9554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9556 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9557 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9558 IEM_MC_FETCH_EFLAGS(EFlags);
9559 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9560 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9561
9562 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9563 IEM_MC_COMMIT_EFLAGS(EFlags);
9564 IEM_MC_ADVANCE_RIP();
9565 IEM_MC_END();
9566 return VINF_SUCCESS;
9567
9568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9569 }
9570 }
9571}
9572
9573
9574
9575/** Opcode 0x0f 0xa4. */
9576FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9577{
9578 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9579 IEMOP_HLP_MIN_386();
9580 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9581}
9582
9583
9584/** Opcode 0x0f 0xa5. */
9585FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9586{
9587 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9588 IEMOP_HLP_MIN_386();
9589 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9590}
9591
9592
9593/** Opcode 0x0f 0xa8. */
9594FNIEMOP_DEF(iemOp_push_gs)
9595{
9596 IEMOP_MNEMONIC(push_gs, "push gs");
9597 IEMOP_HLP_MIN_386();
9598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9599 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9600}
9601
9602
9603/** Opcode 0x0f 0xa9. */
9604FNIEMOP_DEF(iemOp_pop_gs)
9605{
9606 IEMOP_MNEMONIC(pop_gs, "pop gs");
9607 IEMOP_HLP_MIN_386();
9608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9609 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9610}
9611
9612
9613/** Opcode 0x0f 0xaa. */
9614FNIEMOP_DEF(iemOp_rsm)
9615{
9616 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9617 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9619 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9620}
9621
9622
9623
9624/** Opcode 0x0f 0xab. */
9625FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9626{
9627 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9628 IEMOP_HLP_MIN_386();
9629 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
9630}
9631
9632
9633/** Opcode 0x0f 0xac. */
9634FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9635{
9636 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9637 IEMOP_HLP_MIN_386();
9638 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9639}
9640
9641
9642/** Opcode 0x0f 0xad. */
9643FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9644{
9645 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9646 IEMOP_HLP_MIN_386();
9647 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9648}
9649
9650
9651/** Opcode 0x0f 0xae mem/0. */
9652FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9653{
9654 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9655 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9656 return IEMOP_RAISE_INVALID_OPCODE();
9657
9658 IEM_MC_BEGIN(3, 1);
9659 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9660 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9661 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9664 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9665 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9666 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9667 IEM_MC_END();
9668 return VINF_SUCCESS;
9669}
9670
9671
9672/** Opcode 0x0f 0xae mem/1. */
9673FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9674{
9675 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9676 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9677 return IEMOP_RAISE_INVALID_OPCODE();
9678
9679 IEM_MC_BEGIN(3, 1);
9680 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9681 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9682 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9685 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9686 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9687 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9688 IEM_MC_END();
9689 return VINF_SUCCESS;
9690}
9691
9692
9693/**
9694 * @opmaps grp15
9695 * @opcode !11/2
9696 * @oppfx none
9697 * @opcpuid sse
9698 * @opgroup og_sse_mxcsrsm
9699 * @opxcpttype 5
9700 * @optest op1=0 -> mxcsr=0
9701 * @optest op1=0x2083 -> mxcsr=0x2083
9702 * @optest op1=0xfffffffe -> value.xcpt=0xd
9703 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9704 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9705 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9706 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9707 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9708 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9709 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9710 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9711 */
9712FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9713{
9714 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9715 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9716 return IEMOP_RAISE_INVALID_OPCODE();
9717
9718 IEM_MC_BEGIN(2, 0);
9719 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9720 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9723 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9724 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9725 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9726 IEM_MC_END();
9727 return VINF_SUCCESS;
9728}
9729
9730
9731/**
9732 * @opmaps grp15
9733 * @opcode !11/3
9734 * @oppfx none
9735 * @opcpuid sse
9736 * @opgroup og_sse_mxcsrsm
9737 * @opxcpttype 5
9738 * @optest mxcsr=0 -> op1=0
9739 * @optest mxcsr=0x2083 -> op1=0x2083
9740 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9741 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9742 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9743 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9744 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9745 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9746 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9747 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9748 */
9749FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9750{
9751 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9752 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9753 return IEMOP_RAISE_INVALID_OPCODE();
9754
9755 IEM_MC_BEGIN(2, 0);
9756 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9757 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9760 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9761 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9762 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9763 IEM_MC_END();
9764 return VINF_SUCCESS;
9765}
9766
9767
9768/**
9769 * @opmaps grp15
9770 * @opcode !11/4
9771 * @oppfx none
9772 * @opcpuid xsave
9773 * @opgroup og_system
9774 * @opxcpttype none
9775 */
9776FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9777{
9778 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9779 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9780 return IEMOP_RAISE_INVALID_OPCODE();
9781
9782 IEM_MC_BEGIN(3, 0);
9783 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9784 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9785 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9788 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9789 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9790 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9791 IEM_MC_END();
9792 return VINF_SUCCESS;
9793}
9794
9795
9796/**
9797 * @opmaps grp15
9798 * @opcode !11/5
9799 * @oppfx none
9800 * @opcpuid xsave
9801 * @opgroup og_system
9802 * @opxcpttype none
9803 */
9804FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9805{
9806 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9807 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9808 return IEMOP_RAISE_INVALID_OPCODE();
9809
9810 IEM_MC_BEGIN(3, 0);
9811 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9812 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9813 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9816 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9817 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9818 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9819 IEM_MC_END();
9820 return VINF_SUCCESS;
9821}
9822
9823/** Opcode 0x0f 0xae mem/6. */
9824FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9825
9826/**
9827 * @opmaps grp15
9828 * @opcode !11/7
9829 * @oppfx none
9830 * @opcpuid clfsh
9831 * @opgroup og_cachectl
9832 * @optest op1=1 ->
9833 */
9834FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9835{
9836 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9837 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9838 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9839
9840 IEM_MC_BEGIN(2, 0);
9841 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9842 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9845 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9846 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9847 IEM_MC_END();
9848 return VINF_SUCCESS;
9849}
9850
9851/**
9852 * @opmaps grp15
9853 * @opcode !11/7
9854 * @oppfx 0x66
9855 * @opcpuid clflushopt
9856 * @opgroup og_cachectl
9857 * @optest op1=1 ->
9858 */
9859FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9860{
9861 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9862 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9863 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9864
9865 IEM_MC_BEGIN(2, 0);
9866 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9867 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9870 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9871 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9872 IEM_MC_END();
9873 return VINF_SUCCESS;
9874}
9875
9876
9877/** Opcode 0x0f 0xae 11b/5. */
9878FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9879{
9880 RT_NOREF_PV(bRm);
9881 IEMOP_MNEMONIC(lfence, "lfence");
9882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9883 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9884 return IEMOP_RAISE_INVALID_OPCODE();
9885
9886 IEM_MC_BEGIN(0, 0);
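/* On x86 hosts lacking SSE2 the native lfence helper cannot be used, so an
 * alternative memory fence helper is called instead; on ARM64 builds the
 * host check is compiled out and iemAImpl_lfence is always used. The same
 * pattern is used for mfence and sfence below. */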
9887#ifndef RT_ARCH_ARM64
9888 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9889#endif
9890 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9891#ifndef RT_ARCH_ARM64
9892 else
9893 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9894#endif
9895 IEM_MC_ADVANCE_RIP();
9896 IEM_MC_END();
9897 return VINF_SUCCESS;
9898}
9899
9900
9901/** Opcode 0x0f 0xae 11b/6. */
9902FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9903{
9904 RT_NOREF_PV(bRm);
9905 IEMOP_MNEMONIC(mfence, "mfence");
9906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9907 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9908 return IEMOP_RAISE_INVALID_OPCODE();
9909
9910 IEM_MC_BEGIN(0, 0);
9911#ifndef RT_ARCH_ARM64
9912 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9913#endif
9914 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9915#ifndef RT_ARCH_ARM64
9916 else
9917 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9918#endif
9919 IEM_MC_ADVANCE_RIP();
9920 IEM_MC_END();
9921 return VINF_SUCCESS;
9922}
9923
9924
9925/** Opcode 0x0f 0xae 11b/7. */
9926FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9927{
9928 RT_NOREF_PV(bRm);
9929 IEMOP_MNEMONIC(sfence, "sfence");
9930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9931 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse) /* sfence is SSE, not SSE2. */
9932 return IEMOP_RAISE_INVALID_OPCODE();
9933
9934 IEM_MC_BEGIN(0, 0);
9935#ifndef RT_ARCH_ARM64
9936 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9937#endif
9938 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9939#ifndef RT_ARCH_ARM64
9940 else
9941 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9942#endif
9943 IEM_MC_ADVANCE_RIP();
9944 IEM_MC_END();
9945 return VINF_SUCCESS;
9946}
9947
9948
9949/** Opcode 0xf3 0x0f 0xae 11b/0. */
9950FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9951{
9952 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9954 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9955 {
9956 IEM_MC_BEGIN(1, 0);
9957 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9958 IEM_MC_ARG(uint64_t, u64Dst, 0);
9959 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9960 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9961 IEM_MC_ADVANCE_RIP();
9962 IEM_MC_END();
9963 }
9964 else
9965 {
9966 IEM_MC_BEGIN(1, 0);
9967 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9968 IEM_MC_ARG(uint32_t, u32Dst, 0);
9969 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9970 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9971 IEM_MC_ADVANCE_RIP();
9972 IEM_MC_END();
9973 }
9974 return VINF_SUCCESS;
9975}
9976
9977
9978/** Opcode 0xf3 0x0f 0xae 11b/1. */
9979FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9980{
9981 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9983 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9984 {
9985 IEM_MC_BEGIN(1, 0);
9986 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9987 IEM_MC_ARG(uint64_t, u64Dst, 0);
9988 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9989 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9990 IEM_MC_ADVANCE_RIP();
9991 IEM_MC_END();
9992 }
9993 else
9994 {
9995 IEM_MC_BEGIN(1, 0);
9996 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9997 IEM_MC_ARG(uint32_t, u32Dst, 0);
9998 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9999 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10000 IEM_MC_ADVANCE_RIP();
10001 IEM_MC_END();
10002 }
10003 return VINF_SUCCESS;
10004}
10005
10006
10007/** Opcode 0xf3 0x0f 0xae 11b/2. */
10008FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10009{
10010 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10012 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10013 {
10014 IEM_MC_BEGIN(1, 0);
10015 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10016 IEM_MC_ARG(uint64_t, u64Dst, 0);
10017 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
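/* A non-canonical base value raises #GP(0) here; the 32-bit variant in the
 * else branch needs no such check since zero-extended 32-bit values are
 * always canonical. The same applies to wrgsbase below. */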
10018 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10019 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10020 IEM_MC_ADVANCE_RIP();
10021 IEM_MC_END();
10022 }
10023 else
10024 {
10025 IEM_MC_BEGIN(1, 0);
10026 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10027 IEM_MC_ARG(uint32_t, u32Dst, 0);
10028 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10029 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10030 IEM_MC_ADVANCE_RIP();
10031 IEM_MC_END();
10032 }
10033 return VINF_SUCCESS;
10034}
10035
10036
10037/** Opcode 0xf3 0x0f 0xae 11b/3. */
10038FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10039{
10040 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10042 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10043 {
10044 IEM_MC_BEGIN(1, 0);
10045 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10046 IEM_MC_ARG(uint64_t, u64Dst, 0);
10047 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10048 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10049 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10050 IEM_MC_ADVANCE_RIP();
10051 IEM_MC_END();
10052 }
10053 else
10054 {
10055 IEM_MC_BEGIN(1, 0);
10056 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10057 IEM_MC_ARG(uint32_t, u32Dst, 0);
10058 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10059 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10060 IEM_MC_ADVANCE_RIP();
10061 IEM_MC_END();
10062 }
10063 return VINF_SUCCESS;
10064}
10065
10066
10067/**
10068 * Group 15 jump table for register variant.
10069 */
10070IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10071{ /* pfx: none, 066h, 0f3h, 0f2h */
10072 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10073 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10074 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10075 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10076 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10077 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10078 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10079 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10080};
10081AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10082
10083
10084/**
10085 * Group 15 jump table for memory variant.
10086 */
10087IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10088{ /* pfx: none, 066h, 0f3h, 0f2h */
10089 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10090 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10091 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10092 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10093 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10094 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10095 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10096 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10097};
10098AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10099
10100
10101/** Opcode 0x0f 0xae. */
10102FNIEMOP_DEF(iemOp_Grp15)
10103{
10104 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
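/* The tables are indexed by the ModR/M reg field times four plus the
 * active prefix (none, 066h, 0f3h, 0f2h), matching the column comments
 * in the tables above. */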
10106 if (IEM_IS_MODRM_REG_MODE(bRm))
10107 /* register, register */
10108 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10109 + pVCpu->iem.s.idxPrefix], bRm);
10110 /* memory, register */
10111 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10112 + pVCpu->iem.s.idxPrefix], bRm);
10113}
10114
10115
10116/** Opcode 0x0f 0xaf. */
10117FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10118{
10119 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10120 IEMOP_HLP_MIN_386();
10121 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10122 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
10123}
10124
10125
10126/** Opcode 0x0f 0xb0. */
10127FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10128{
10129 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10130 IEMOP_HLP_MIN_486();
10131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10132
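/* CMPXCHG: if AL equals the destination, ZF is set and the source is
 * stored in the destination; otherwise ZF is cleared and the destination
 * is loaded into AL. All the flag work is done by the assembly helpers. */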
10133 if (IEM_IS_MODRM_REG_MODE(bRm))
10134 {
10135 IEMOP_HLP_DONE_DECODING();
10136 IEM_MC_BEGIN(4, 0);
10137 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10138 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10139 IEM_MC_ARG(uint8_t, u8Src, 2);
10140 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10141
10142 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10143 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10144 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10145 IEM_MC_REF_EFLAGS(pEFlags);
10146 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10147 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10148 else
10149 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10150
10151 IEM_MC_ADVANCE_RIP();
10152 IEM_MC_END();
10153 }
10154 else
10155 {
10156 IEM_MC_BEGIN(4, 3);
10157 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10158 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10159 IEM_MC_ARG(uint8_t, u8Src, 2);
10160 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10162 IEM_MC_LOCAL(uint8_t, u8Al);
10163
10164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10165 IEMOP_HLP_DONE_DECODING();
10166 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10167 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10168 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10169 IEM_MC_FETCH_EFLAGS(EFlags);
10170 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10171 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10172 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10173 else
10174 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10175
10176 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10177 IEM_MC_COMMIT_EFLAGS(EFlags);
10178 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10179 IEM_MC_ADVANCE_RIP();
10180 IEM_MC_END();
10181 }
10182 return VINF_SUCCESS;
10183}
10184
10185/** Opcode 0x0f 0xb1. */
10186FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10187{
10188 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10189 IEMOP_HLP_MIN_486();
10190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10191
10192 if (IEM_IS_MODRM_REG_MODE(bRm))
10193 {
10194 IEMOP_HLP_DONE_DECODING();
10195 switch (pVCpu->iem.s.enmEffOpSize)
10196 {
10197 case IEMMODE_16BIT:
10198 IEM_MC_BEGIN(4, 0);
10199 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10200 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10201 IEM_MC_ARG(uint16_t, u16Src, 2);
10202 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10203
10204 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10205 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10206 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10207 IEM_MC_REF_EFLAGS(pEFlags);
10208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10209 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10210 else
10211 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10212
10213 IEM_MC_ADVANCE_RIP();
10214 IEM_MC_END();
10215 return VINF_SUCCESS;
10216
10217 case IEMMODE_32BIT:
10218 IEM_MC_BEGIN(4, 0);
10219 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10220 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10221 IEM_MC_ARG(uint32_t, u32Src, 2);
10222 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10223
10224 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10225 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10226 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10227 IEM_MC_REF_EFLAGS(pEFlags);
10228 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10229 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10230 else
10231 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10232
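/* A 32-bit write zero-extends to 64 bits, but only for the register that
 * was actually written: the destination when the compare succeeded (ZF
 * set), EAX when it failed. Hence the conditional clearing here. */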
10233 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10234 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10235 } IEM_MC_ELSE() {
10236 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10237 } IEM_MC_ENDIF();
10238
10239 IEM_MC_ADVANCE_RIP();
10240 IEM_MC_END();
10241 return VINF_SUCCESS;
10242
10243 case IEMMODE_64BIT:
10244 IEM_MC_BEGIN(4, 0);
10245 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10246 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10247#ifdef RT_ARCH_X86
10248 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10249#else
10250 IEM_MC_ARG(uint64_t, u64Src, 2);
10251#endif
10252 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10253
10254 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10255 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10256 IEM_MC_REF_EFLAGS(pEFlags);
10257#ifdef RT_ARCH_X86
10258 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10259 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10260 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10261 else
10262 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10263#else
10264 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10265 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10266 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10267 else
10268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10269#endif
10270
10271 IEM_MC_ADVANCE_RIP();
10272 IEM_MC_END();
10273 return VINF_SUCCESS;
10274
10275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10276 }
10277 }
10278 else
10279 {
10280 switch (pVCpu->iem.s.enmEffOpSize)
10281 {
10282 case IEMMODE_16BIT:
10283 IEM_MC_BEGIN(4, 3);
10284 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10285 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10286 IEM_MC_ARG(uint16_t, u16Src, 2);
10287 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10289 IEM_MC_LOCAL(uint16_t, u16Ax);
10290
10291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10292 IEMOP_HLP_DONE_DECODING();
10293 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10294 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10295 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10296 IEM_MC_FETCH_EFLAGS(EFlags);
10297 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10298 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10299 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10300 else
10301 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10302
10303 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10304 IEM_MC_COMMIT_EFLAGS(EFlags);
10305 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10306 IEM_MC_ADVANCE_RIP();
10307 IEM_MC_END();
10308 return VINF_SUCCESS;
10309
10310 case IEMMODE_32BIT:
10311 IEM_MC_BEGIN(4, 3);
10312 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10313 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10314 IEM_MC_ARG(uint32_t, u32Src, 2);
10315 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10317 IEM_MC_LOCAL(uint32_t, u32Eax);
10318
10319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10320 IEMOP_HLP_DONE_DECODING();
10321 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10322 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10323 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10324 IEM_MC_FETCH_EFLAGS(EFlags);
10325 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10326 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10327 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10328 else
10329 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10330
10331 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10332 IEM_MC_COMMIT_EFLAGS(EFlags);
10333 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10334 IEM_MC_ADVANCE_RIP();
10335 IEM_MC_END();
10336 return VINF_SUCCESS;
10337
10338 case IEMMODE_64BIT:
10339 IEM_MC_BEGIN(4, 3);
10340 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10341 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10342#ifdef RT_ARCH_X86
10343 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10344#else
10345 IEM_MC_ARG(uint64_t, u64Src, 2);
10346#endif
10347 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10349 IEM_MC_LOCAL(uint64_t, u64Rax);
10350
10351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10352 IEMOP_HLP_DONE_DECODING();
10353 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10354 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10355 IEM_MC_FETCH_EFLAGS(EFlags);
10356 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10357#ifdef RT_ARCH_X86
10358 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10359 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10360 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10361 else
10362 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10363#else
10364 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10365 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10366 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10367 else
10368 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10369#endif
10370
10371 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10372 IEM_MC_COMMIT_EFLAGS(EFlags);
10373 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10374 IEM_MC_ADVANCE_RIP();
10375 IEM_MC_END();
10376 return VINF_SUCCESS;
10377
10378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10379 }
10380 }
10381}
10382
10383
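/**
 * Common worker for LSS, LFS and LGS: loads a far pointer from memory,
 * i.e. an offset of the effective operand size followed immediately by a
 * 16-bit selector, into the given segment and general register pair.
 * E.g. with a 32-bit operand size the offset is fetched at GCPtrEff and
 * the selector at GCPtrEff + 4.
 */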
10384FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
10385{
10386 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
10387 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
10388
10389 switch (pVCpu->iem.s.enmEffOpSize)
10390 {
10391 case IEMMODE_16BIT:
10392 IEM_MC_BEGIN(5, 1);
10393 IEM_MC_ARG(uint16_t, uSel, 0);
10394 IEM_MC_ARG(uint16_t, offSeg, 1);
10395 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10396 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10397 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10398 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10401 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10402 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
10403 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10404 IEM_MC_END();
10405 return VINF_SUCCESS;
10406
10407 case IEMMODE_32BIT:
10408 IEM_MC_BEGIN(5, 1);
10409 IEM_MC_ARG(uint16_t, uSel, 0);
10410 IEM_MC_ARG(uint32_t, offSeg, 1);
10411 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10412 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10413 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10414 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10417 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10418 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
10419 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10420 IEM_MC_END();
10421 return VINF_SUCCESS;
10422
10423 case IEMMODE_64BIT:
10424 IEM_MC_BEGIN(5, 1);
10425 IEM_MC_ARG(uint16_t, uSel, 0);
10426 IEM_MC_ARG(uint64_t, offSeg, 1);
10427 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10428 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10429 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10430 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10433 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
10434 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10435 else
10436 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10437 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
10438 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10439 IEM_MC_END();
10440 return VINF_SUCCESS;
10441
10442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10443 }
10444}
10445
10446
10447/** Opcode 0x0f 0xb2. */
10448FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10449{
10450 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10451 IEMOP_HLP_MIN_386();
10452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10453 if (IEM_IS_MODRM_REG_MODE(bRm))
10454 return IEMOP_RAISE_INVALID_OPCODE();
10455 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10456}
10457
10458
10459/** Opcode 0x0f 0xb3. */
10460FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10461{
10462 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10463 IEMOP_HLP_MIN_386();
10464 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
10465}
10466
10467
10468/** Opcode 0x0f 0xb4. */
10469FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10470{
10471 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10472 IEMOP_HLP_MIN_386();
10473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10474 if (IEM_IS_MODRM_REG_MODE(bRm))
10475 return IEMOP_RAISE_INVALID_OPCODE();
10476 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10477}
10478
10479
10480/** Opcode 0x0f 0xb5. */
10481FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10482{
10483 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10484 IEMOP_HLP_MIN_386();
10485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10486 if (IEM_IS_MODRM_REG_MODE(bRm))
10487 return IEMOP_RAISE_INVALID_OPCODE();
10488 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10489}
10490
10491
10492/** Opcode 0x0f 0xb6. */
10493FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10494{
10495 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10496 IEMOP_HLP_MIN_386();
10497
10498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10499
10500 /*
10501 * If rm is denoting a register, no more instruction bytes.
10502 */
10503 if (IEM_IS_MODRM_REG_MODE(bRm))
10504 {
10505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10506 switch (pVCpu->iem.s.enmEffOpSize)
10507 {
10508 case IEMMODE_16BIT:
10509 IEM_MC_BEGIN(0, 1);
10510 IEM_MC_LOCAL(uint16_t, u16Value);
10511 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10512 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10513 IEM_MC_ADVANCE_RIP();
10514 IEM_MC_END();
10515 return VINF_SUCCESS;
10516
10517 case IEMMODE_32BIT:
10518 IEM_MC_BEGIN(0, 1);
10519 IEM_MC_LOCAL(uint32_t, u32Value);
10520 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10521 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10522 IEM_MC_ADVANCE_RIP();
10523 IEM_MC_END();
10524 return VINF_SUCCESS;
10525
10526 case IEMMODE_64BIT:
10527 IEM_MC_BEGIN(0, 1);
10528 IEM_MC_LOCAL(uint64_t, u64Value);
10529 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10530 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10531 IEM_MC_ADVANCE_RIP();
10532 IEM_MC_END();
10533 return VINF_SUCCESS;
10534
10535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10536 }
10537 }
10538 else
10539 {
10540 /*
10541 * We're loading a register from memory.
10542 */
10543 switch (pVCpu->iem.s.enmEffOpSize)
10544 {
10545 case IEMMODE_16BIT:
10546 IEM_MC_BEGIN(0, 2);
10547 IEM_MC_LOCAL(uint16_t, u16Value);
10548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10551 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10552 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10553 IEM_MC_ADVANCE_RIP();
10554 IEM_MC_END();
10555 return VINF_SUCCESS;
10556
10557 case IEMMODE_32BIT:
10558 IEM_MC_BEGIN(0, 2);
10559 IEM_MC_LOCAL(uint32_t, u32Value);
10560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10563 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10564 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10565 IEM_MC_ADVANCE_RIP();
10566 IEM_MC_END();
10567 return VINF_SUCCESS;
10568
10569 case IEMMODE_64BIT:
10570 IEM_MC_BEGIN(0, 2);
10571 IEM_MC_LOCAL(uint64_t, u64Value);
10572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10575 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10576 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10577 IEM_MC_ADVANCE_RIP();
10578 IEM_MC_END();
10579 return VINF_SUCCESS;
10580
10581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10582 }
10583 }
10584}
10585
10586
10587/** Opcode 0x0f 0xb7. */
10588FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10589{
10590 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10591 IEMOP_HLP_MIN_386();
10592
10593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10594
10595 /** @todo Not entirely sure how the operand size prefix is handled here,
10596 * assuming that it will be ignored. Would be nice to have a few
10597 * tests for this. */
10598 /*
10599 * If rm is denoting a register, no more instruction bytes.
10600 */
10601 if (IEM_IS_MODRM_REG_MODE(bRm))
10602 {
10603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10604 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10605 {
10606 IEM_MC_BEGIN(0, 1);
10607 IEM_MC_LOCAL(uint32_t, u32Value);
10608 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10609 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10610 IEM_MC_ADVANCE_RIP();
10611 IEM_MC_END();
10612 }
10613 else
10614 {
10615 IEM_MC_BEGIN(0, 1);
10616 IEM_MC_LOCAL(uint64_t, u64Value);
10617 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10618 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10619 IEM_MC_ADVANCE_RIP();
10620 IEM_MC_END();
10621 }
10622 }
10623 else
10624 {
10625 /*
10626 * We're loading a register from memory.
10627 */
10628 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10629 {
10630 IEM_MC_BEGIN(0, 2);
10631 IEM_MC_LOCAL(uint32_t, u32Value);
10632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10635 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10636 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10637 IEM_MC_ADVANCE_RIP();
10638 IEM_MC_END();
10639 }
10640 else
10641 {
10642 IEM_MC_BEGIN(0, 2);
10643 IEM_MC_LOCAL(uint64_t, u64Value);
10644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10647 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10648 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10649 IEM_MC_ADVANCE_RIP();
10650 IEM_MC_END();
10651 }
10652 }
10653 return VINF_SUCCESS;
10654}
10655
10656
10657/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10658FNIEMOP_UD_STUB(iemOp_jmpe);
10659
10660
10661/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10662FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10663{
10664 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10665 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10666 return iemOp_InvalidNeedRM(pVCpu);
10667#ifndef TST_IEM_CHECK_MC
10668# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10669 static const IEMOPBINSIZES s_Native =
10670 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10671# endif
10672 static const IEMOPBINSIZES s_Fallback =
10673 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10674#endif
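/* IEM_SELECT_HOST_OR_FALLBACK picks the native helpers when the host CPU
 * has the feature and the assembly implementations are compiled in, and
 * the portable C fallbacks otherwise. */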
10675 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
10676}
10677
10678
10679/**
10680 * @opcode 0xb9
10681 * @opinvalid intel-modrm
10682 * @optest ->
10683 */
10684FNIEMOP_DEF(iemOp_Grp10)
10685{
10686 /*
10687 * AMD does not decode beyond the 0xb9 whereas intel decodes the modr/m byte
10688 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10689 */
10690 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10691 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10692 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10693}
10694
10695
10696/** Opcode 0x0f 0xba. */
10697FNIEMOP_DEF(iemOp_Grp8)
10698{
10699 IEMOP_HLP_MIN_386();
10700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10701 PCIEMOPBINSIZES pImpl;
10702 switch (IEM_GET_MODRM_REG_8(bRm))
10703 {
10704 case 0: case 1: case 2: case 3:
10705 /* Both AMD and Intel want full modr/m decoding and imm8. */
10706 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10707 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
10708 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
10709 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
10710 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
10711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10712 }
10713 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10714
10715 if (IEM_IS_MODRM_REG_MODE(bRm))
10716 {
10717 /* register destination. */
10718 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10720
10721 switch (pVCpu->iem.s.enmEffOpSize)
10722 {
10723 case IEMMODE_16BIT:
10724 IEM_MC_BEGIN(3, 0);
10725 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10726 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
10727 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10728
10729 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10730 IEM_MC_REF_EFLAGS(pEFlags);
10731 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10732
10733 IEM_MC_ADVANCE_RIP();
10734 IEM_MC_END();
10735 return VINF_SUCCESS;
10736
10737 case IEMMODE_32BIT:
10738 IEM_MC_BEGIN(3, 0);
10739 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10740 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
10741 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10742
10743 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10744 IEM_MC_REF_EFLAGS(pEFlags);
10745 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10746
10747 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10748 IEM_MC_ADVANCE_RIP();
10749 IEM_MC_END();
10750 return VINF_SUCCESS;
10751
10752 case IEMMODE_64BIT:
10753 IEM_MC_BEGIN(3, 0);
10754 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10755 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
10756 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10757
10758 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10759 IEM_MC_REF_EFLAGS(pEFlags);
10760 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10761
10762 IEM_MC_ADVANCE_RIP();
10763 IEM_MC_END();
10764 return VINF_SUCCESS;
10765
10766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10767 }
10768 }
10769 else
10770 {
10771 /* memory destination. */
10772
10773 uint32_t fAccess;
10774 if (pImpl->pfnLockedU16)
10775 fAccess = IEM_ACCESS_DATA_RW;
10776 else /* BT */
10777 fAccess = IEM_ACCESS_DATA_R;
10778
10779 /** @todo test negative bit offsets! */
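/* Unlike the Gv forms, the imm8 bit offset is simply masked down to the
 * operand width (e.g. u8Bit & 0x1f for 32-bit operands), so the access
 * stays within the addressed unit and no address adjustment is needed. */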
10780 switch (pVCpu->iem.s.enmEffOpSize)
10781 {
10782 case IEMMODE_16BIT:
10783 IEM_MC_BEGIN(3, 1);
10784 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10785 IEM_MC_ARG(uint16_t, u16Src, 1);
10786 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10788
10789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10790 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10791 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
10792 if (pImpl->pfnLockedU16)
10793 IEMOP_HLP_DONE_DECODING();
10794 else
10795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10796 IEM_MC_FETCH_EFLAGS(EFlags);
10797 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10798 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10799 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10800 else
10801 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10802 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10803
10804 IEM_MC_COMMIT_EFLAGS(EFlags);
10805 IEM_MC_ADVANCE_RIP();
10806 IEM_MC_END();
10807 return VINF_SUCCESS;
10808
10809 case IEMMODE_32BIT:
10810 IEM_MC_BEGIN(3, 1);
10811 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10812 IEM_MC_ARG(uint32_t, u32Src, 1);
10813 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10815
10816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10817 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10818 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
10819 if (pImpl->pfnLockedU16)
10820 IEMOP_HLP_DONE_DECODING();
10821 else
10822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10823 IEM_MC_FETCH_EFLAGS(EFlags);
10824 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10825 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10826 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10827 else
10828 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10829 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10830
10831 IEM_MC_COMMIT_EFLAGS(EFlags);
10832 IEM_MC_ADVANCE_RIP();
10833 IEM_MC_END();
10834 return VINF_SUCCESS;
10835
10836 case IEMMODE_64BIT:
10837 IEM_MC_BEGIN(3, 1);
10838 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10839 IEM_MC_ARG(uint64_t, u64Src, 1);
10840 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10842
10843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10844 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10845 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
10846 if (pImpl->pfnLockedU16)
10847 IEMOP_HLP_DONE_DECODING();
10848 else
10849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10850 IEM_MC_FETCH_EFLAGS(EFlags);
10851 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10852 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10853 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10854 else
10855 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10856 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10857
10858 IEM_MC_COMMIT_EFLAGS(EFlags);
10859 IEM_MC_ADVANCE_RIP();
10860 IEM_MC_END();
10861 return VINF_SUCCESS;
10862
10863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10864 }
10865 }
10866}
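
/*
 * A minimal C sketch of what the imm8 bit-test family above computes for a
 * 32-bit register destination (illustration only; btsU32 is a hypothetical
 * helper, not an IEM worker).  The immediate is masked to the operand width,
 * matching the 'u8Bit & 0x1f' above, and CF receives the selected bit.
 */
#if 0 /* illustration only */
#include <stdbool.h>
#include <stdint.h>

static bool btsU32(uint32_t *puDst, uint8_t bImm, bool fSet)
{
    uint32_t const fMask  = UINT32_C(1) << (bImm & 0x1f); /* offset masked to 0..31 */
    bool const     fCarry = (*puDst & fMask) != 0;        /* CF = old bit value */
    if (fSet)
        *puDst |= fMask;   /* BTS; BT leaves the operand alone, BTR clears, BTC toggles */
    return fCarry;
}
#endif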
10867
10868
10869/** Opcode 0x0f 0xbb. */
10870FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10871{
10872 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10873 IEMOP_HLP_MIN_386();
10874 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
10875}
10876
10877
10878/**
10879 * Common worker for BSF and BSR instructions.
10880 *
10881 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10882 * the destination register, which means that for 32-bit operations the high
10883 * bits must be left alone.
10884 *
10885 * @param pImpl Pointer to the instruction implementation (assembly).
10886 */
10887FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10888{
10889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10890
10891 /*
10892 * If rm is denoting a register, no more instruction bytes.
10893 */
10894 if (IEM_IS_MODRM_REG_MODE(bRm))
10895 {
10896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10897 switch (pVCpu->iem.s.enmEffOpSize)
10898 {
10899 case IEMMODE_16BIT:
10900 IEM_MC_BEGIN(3, 0);
10901 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10902 IEM_MC_ARG(uint16_t, u16Src, 1);
10903 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10904
10905 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10906 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10907 IEM_MC_REF_EFLAGS(pEFlags);
10908 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10909
10910 IEM_MC_ADVANCE_RIP();
10911 IEM_MC_END();
10912 break;
10913
10914 case IEMMODE_32BIT:
10915 IEM_MC_BEGIN(3, 0);
10916 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10917 IEM_MC_ARG(uint32_t, u32Src, 1);
10918 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10919
10920 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10921 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10922 IEM_MC_REF_EFLAGS(pEFlags);
10923 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10924 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10925 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10926 IEM_MC_ENDIF();
10927 IEM_MC_ADVANCE_RIP();
10928 IEM_MC_END();
10929 break;
10930
10931 case IEMMODE_64BIT:
10932 IEM_MC_BEGIN(3, 0);
10933 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10934 IEM_MC_ARG(uint64_t, u64Src, 1);
10935 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10936
10937 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10938 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10939 IEM_MC_REF_EFLAGS(pEFlags);
10940 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10941
10942 IEM_MC_ADVANCE_RIP();
10943 IEM_MC_END();
10944 break;
10945 }
10946 }
10947 else
10948 {
10949 /*
10950 * We're accessing memory.
10951 */
10952 switch (pVCpu->iem.s.enmEffOpSize)
10953 {
10954 case IEMMODE_16BIT:
10955 IEM_MC_BEGIN(3, 1);
10956 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10957 IEM_MC_ARG(uint16_t, u16Src, 1);
10958 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10960
10961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10963 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10964 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10965 IEM_MC_REF_EFLAGS(pEFlags);
10966 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10967
10968 IEM_MC_ADVANCE_RIP();
10969 IEM_MC_END();
10970 break;
10971
10972 case IEMMODE_32BIT:
10973 IEM_MC_BEGIN(3, 1);
10974 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10975 IEM_MC_ARG(uint32_t, u32Src, 1);
10976 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10978
10979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10981 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10982 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10983 IEM_MC_REF_EFLAGS(pEFlags);
10984 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10985
10986 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10987 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10988 IEM_MC_ENDIF();
10989 IEM_MC_ADVANCE_RIP();
10990 IEM_MC_END();
10991 break;
10992
10993 case IEMMODE_64BIT:
10994 IEM_MC_BEGIN(3, 1);
10995 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10996 IEM_MC_ARG(uint64_t, u64Src, 1);
10997 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10999
11000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11002 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11003 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11004 IEM_MC_REF_EFLAGS(pEFlags);
11005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11006
11007 IEM_MC_ADVANCE_RIP();
11008 IEM_MC_END();
11009 break;
11010 }
11011 }
11012 return VINF_SUCCESS;
11013}
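
/*
 * A minimal C model of the BSF semantics handled by the helper above
 * (illustration only; bsfU32 is a hypothetical helper).  On a zero source,
 * ZF is set and the destination is left untouched (the Intel behaviour),
 * which is why the 32-bit paths above only clear the high dword when ZF
 * ends up clear.
 */
#if 0 /* illustration only */
#include <stdint.h>

static void bsfU32(uint32_t *puDst, uint32_t uSrc, int *pfZF)
{
    if (!uSrc)
        *pfZF = 1;                      /* ZF=1, *puDst not written */
    else
    {
        uint32_t iBit = 0;
        while (!(uSrc & (UINT32_C(1) << iBit)))
            iBit++;                     /* index of the lowest set bit */
        *puDst = iBit;
        *pfZF  = 0;
    }
}
#endif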
11014
11015
11016/** Opcode 0x0f 0xbc. */
11017FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11018{
11019 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11020 IEMOP_HLP_MIN_386();
11021 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11022 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11023}
11024
11025
11026/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
11027FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11028{
11029 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11030 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11031 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11032
11033#ifndef TST_IEM_CHECK_MC
11034 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11035 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11036 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11037 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11038 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11039 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11040 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11041 {
11042 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11043 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11044 };
11045#endif
11046 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11047 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
11048 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
11049}
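
/*
 * A minimal C sketch of the TZCNT semantics selected above (illustration
 * only; tzcntU32 is a hypothetical helper, not one of the workers in the
 * tables).  Unlike BSF, a zero source produces the operand width with CF=1,
 * while ZF reflects a zero result; and without BMI1 the F3 prefix is
 * ignored so the opcode decodes as plain BSF, which is what the fBmi1
 * check above forwards to.
 */
#if 0 /* illustration only */
#include <stdint.h>

static uint32_t tzcntU32(uint32_t uSrc, int *pfCF, int *pfZF)
{
    uint32_t cBits = 0;
    while (cBits < 32 && !(uSrc & (UINT32_C(1) << cBits)))
        cBits++;                /* count trailing zero bits */
    *pfCF = (uSrc == 0);        /* CF=1 when the source was zero */
    *pfZF = (cBits == 0);       /* ZF=1 when bit 0 was set (result is zero) */
    return cBits;               /* 32 for a zero source */
}
#endif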
11050
11051
11052/** Opcode 0x0f 0xbd. */
11053FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11054{
11055 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11056 IEMOP_HLP_MIN_386();
11057 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11058 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11059}
11060
11061
11062/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11063FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11064{
11065 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11066 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11067 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11068
11069#ifndef TST_IEM_CHECK_MC
11070 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11071 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11072 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11073 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11074 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11075 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11076 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11077 {
11078 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11079 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11080 };
11081#endif
11082 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11083 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
11084 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
11085}
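
/*
 * The LZCNT counterpart of the TZCNT sketch above (illustration only;
 * lzcntU32 is a hypothetical helper): a zero source yields the operand
 * width with CF=1, and without BMI1 the opcode falls back to BSR instead.
 */
#if 0 /* illustration only */
#include <stdint.h>

static uint32_t lzcntU32(uint32_t uSrc, int *pfCF, int *pfZF)
{
    uint32_t cBits = 0;
    while (cBits < 32 && !(uSrc & (UINT32_C(0x80000000) >> cBits)))
        cBits++;                /* count leading zero bits */
    *pfCF = (uSrc == 0);
    *pfZF = (cBits == 0);
    return cBits;
}
#endif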
11086
11087
11088
11089/** Opcode 0x0f 0xbe. */
11090FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11091{
11092 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11093 IEMOP_HLP_MIN_386();
11094
11095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11096
11097 /*
11098 * If rm is denoting a register, no more instruction bytes.
11099 */
11100 if (IEM_IS_MODRM_REG_MODE(bRm))
11101 {
11102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11103 switch (pVCpu->iem.s.enmEffOpSize)
11104 {
11105 case IEMMODE_16BIT:
11106 IEM_MC_BEGIN(0, 1);
11107 IEM_MC_LOCAL(uint16_t, u16Value);
11108 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11109 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11110 IEM_MC_ADVANCE_RIP();
11111 IEM_MC_END();
11112 return VINF_SUCCESS;
11113
11114 case IEMMODE_32BIT:
11115 IEM_MC_BEGIN(0, 1);
11116 IEM_MC_LOCAL(uint32_t, u32Value);
11117 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11118 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11119 IEM_MC_ADVANCE_RIP();
11120 IEM_MC_END();
11121 return VINF_SUCCESS;
11122
11123 case IEMMODE_64BIT:
11124 IEM_MC_BEGIN(0, 1);
11125 IEM_MC_LOCAL(uint64_t, u64Value);
11126 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11127 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11128 IEM_MC_ADVANCE_RIP();
11129 IEM_MC_END();
11130 return VINF_SUCCESS;
11131
11132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11133 }
11134 }
11135 else
11136 {
11137 /*
11138 * We're loading a register from memory.
11139 */
11140 switch (pVCpu->iem.s.enmEffOpSize)
11141 {
11142 case IEMMODE_16BIT:
11143 IEM_MC_BEGIN(0, 2);
11144 IEM_MC_LOCAL(uint16_t, u16Value);
11145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11148 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11149 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11150 IEM_MC_ADVANCE_RIP();
11151 IEM_MC_END();
11152 return VINF_SUCCESS;
11153
11154 case IEMMODE_32BIT:
11155 IEM_MC_BEGIN(0, 2);
11156 IEM_MC_LOCAL(uint32_t, u32Value);
11157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11160 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11161 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11162 IEM_MC_ADVANCE_RIP();
11163 IEM_MC_END();
11164 return VINF_SUCCESS;
11165
11166 case IEMMODE_64BIT:
11167 IEM_MC_BEGIN(0, 2);
11168 IEM_MC_LOCAL(uint64_t, u64Value);
11169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11172 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11173 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11174 IEM_MC_ADVANCE_RIP();
11175 IEM_MC_END();
11176 return VINF_SUCCESS;
11177
11178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11179 }
11180 }
11181}
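
/*
 * What the IEM_MC_FETCH_GREG_U8_SX_U32 style fetches above boil down to
 * (illustration only; movsxU8ToU32 is a hypothetical helper): the byte is
 * sign-extended to the effective operand size before being stored.
 */
#if 0 /* illustration only */
#include <stdint.h>

static uint32_t movsxU8ToU32(uint8_t u8Src)
{
    return (uint32_t)(int32_t)(int8_t)u8Src;    /* e.g. 0x80 -> 0xffffff80 */
}
#endif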
11182
11183
11184/** Opcode 0x0f 0xbf. */
11185FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11186{
11187 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11188 IEMOP_HLP_MIN_386();
11189
11190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11191
11192 /** @todo Not entirely sure how the operand size prefix is handled here,
11193 * assuming that it will be ignored. Would be nice to have a few
11194 * tests for this. */
11195 /*
11196 * If rm is denoting a register, no more instruction bytes.
11197 */
11198 if (IEM_IS_MODRM_REG_MODE(bRm))
11199 {
11200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11201 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11202 {
11203 IEM_MC_BEGIN(0, 1);
11204 IEM_MC_LOCAL(uint32_t, u32Value);
11205 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11206 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11207 IEM_MC_ADVANCE_RIP();
11208 IEM_MC_END();
11209 }
11210 else
11211 {
11212 IEM_MC_BEGIN(0, 1);
11213 IEM_MC_LOCAL(uint64_t, u64Value);
11214 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11215 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11216 IEM_MC_ADVANCE_RIP();
11217 IEM_MC_END();
11218 }
11219 }
11220 else
11221 {
11222 /*
11223 * We're loading a register from memory.
11224 */
11225 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11226 {
11227 IEM_MC_BEGIN(0, 2);
11228 IEM_MC_LOCAL(uint32_t, u32Value);
11229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11232 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11233 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11234 IEM_MC_ADVANCE_RIP();
11235 IEM_MC_END();
11236 }
11237 else
11238 {
11239 IEM_MC_BEGIN(0, 2);
11240 IEM_MC_LOCAL(uint64_t, u64Value);
11241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11244 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11245 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11246 IEM_MC_ADVANCE_RIP();
11247 IEM_MC_END();
11248 }
11249 }
11250 return VINF_SUCCESS;
11251}
11252
11253
11254/** Opcode 0x0f 0xc0. */
11255FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11256{
11257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11258 IEMOP_HLP_MIN_486();
11259 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11260
11261 /*
11262 * If rm is denoting a register, no more instruction bytes.
11263 */
11264 if (IEM_IS_MODRM_REG_MODE(bRm))
11265 {
11266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11267
11268 IEM_MC_BEGIN(3, 0);
11269 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11270 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11271 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11272
11273 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11274 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11275 IEM_MC_REF_EFLAGS(pEFlags);
11276 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11277
11278 IEM_MC_ADVANCE_RIP();
11279 IEM_MC_END();
11280 }
11281 else
11282 {
11283 /*
11284 * We're accessing memory.
11285 */
11286 IEM_MC_BEGIN(3, 3);
11287 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11288 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11289 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11290 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11292
11293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11294 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11295 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11296 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11297 IEM_MC_FETCH_EFLAGS(EFlags);
11298 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11299 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11300 else
11301 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11302
11303 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
11304 IEM_MC_COMMIT_EFLAGS(EFlags);
11305 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11306 IEM_MC_ADVANCE_RIP();
11307 IEM_MC_END();
11308 return VINF_SUCCESS;
11309 }
11310 return VINF_SUCCESS;
11311}
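
/*
 * A minimal C model of XADD (illustration only; xaddU8 is a hypothetical
 * helper): the destination receives the sum and the source register
 * receives the old destination value, which is why the memory form above
 * snapshots the register into u8RegCopy before calling the worker.
 */
#if 0 /* illustration only */
#include <stdint.h>

static void xaddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8Old = *pu8Dst;
    *pu8Dst = (uint8_t)(u8Old + *pu8Reg);   /* EFLAGS are set as for ADD (omitted) */
    *pu8Reg = u8Old;                        /* old destination goes to the register */
}
#endif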
11312
11313
11314/** Opcode 0x0f 0xc1. */
11315FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11316{
11317 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11318 IEMOP_HLP_MIN_486();
11319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11320
11321 /*
11322 * If rm is denoting a register, no more instruction bytes.
11323 */
11324 if (IEM_IS_MODRM_REG_MODE(bRm))
11325 {
11326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11327
11328 switch (pVCpu->iem.s.enmEffOpSize)
11329 {
11330 case IEMMODE_16BIT:
11331 IEM_MC_BEGIN(3, 0);
11332 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11333 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11334 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11335
11336 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11337 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11338 IEM_MC_REF_EFLAGS(pEFlags);
11339 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11340
11341 IEM_MC_ADVANCE_RIP();
11342 IEM_MC_END();
11343 return VINF_SUCCESS;
11344
11345 case IEMMODE_32BIT:
11346 IEM_MC_BEGIN(3, 0);
11347 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11348 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11349 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11350
11351 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11352 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11353 IEM_MC_REF_EFLAGS(pEFlags);
11354 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11355
11356 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11357 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11358 IEM_MC_ADVANCE_RIP();
11359 IEM_MC_END();
11360 return VINF_SUCCESS;
11361
11362 case IEMMODE_64BIT:
11363 IEM_MC_BEGIN(3, 0);
11364 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11365 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11366 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11367
11368 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11369 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11370 IEM_MC_REF_EFLAGS(pEFlags);
11371 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11372
11373 IEM_MC_ADVANCE_RIP();
11374 IEM_MC_END();
11375 return VINF_SUCCESS;
11376
11377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11378 }
11379 }
11380 else
11381 {
11382 /*
11383 * We're accessing memory.
11384 */
11385 switch (pVCpu->iem.s.enmEffOpSize)
11386 {
11387 case IEMMODE_16BIT:
11388 IEM_MC_BEGIN(3, 3);
11389 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11390 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11391 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11392 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11394
11395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11396 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11397 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11398 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11399 IEM_MC_FETCH_EFLAGS(EFlags);
11400 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11401 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11402 else
11403 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11404
11405 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
11406 IEM_MC_COMMIT_EFLAGS(EFlags);
11407 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11408 IEM_MC_ADVANCE_RIP();
11409 IEM_MC_END();
11410 return VINF_SUCCESS;
11411
11412 case IEMMODE_32BIT:
11413 IEM_MC_BEGIN(3, 3);
11414 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11415 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11416 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11417 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11419
11420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11421 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11422 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11423 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11424 IEM_MC_FETCH_EFLAGS(EFlags);
11425 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11426 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11427 else
11428 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11429
11430 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
11431 IEM_MC_COMMIT_EFLAGS(EFlags);
11432 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11433 IEM_MC_ADVANCE_RIP();
11434 IEM_MC_END();
11435 return VINF_SUCCESS;
11436
11437 case IEMMODE_64BIT:
11438 IEM_MC_BEGIN(3, 3);
11439 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11440 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11441 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11442 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11444
11445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11446 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11447 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11448 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11449 IEM_MC_FETCH_EFLAGS(EFlags);
11450 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11451 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11452 else
11453 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11454
11455 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
11456 IEM_MC_COMMIT_EFLAGS(EFlags);
11457 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11458 IEM_MC_ADVANCE_RIP();
11459 IEM_MC_END();
11460 return VINF_SUCCESS;
11461
11462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11463 }
11464 }
11465}
11466
11467
11468/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11469FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11470{
11471 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11472
11473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11474 if (IEM_IS_MODRM_REG_MODE(bRm))
11475 {
11476 /*
11477 * Register, register.
11478 */
11479 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11481 IEM_MC_BEGIN(4, 2);
11482 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11483 IEM_MC_LOCAL(X86XMMREG, Dst);
11484 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11485 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11486 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11487 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11488 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11489 IEM_MC_PREPARE_SSE_USAGE();
11490 IEM_MC_REF_MXCSR(pfMxcsr);
11491 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11492 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11493 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11494 IEM_MC_IF_MXCSR_XCPT_PENDING()
11495 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11496 IEM_MC_ELSE()
11497 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11498 IEM_MC_ENDIF();
11499
11500 IEM_MC_ADVANCE_RIP();
11501 IEM_MC_END();
11502 }
11503 else
11504 {
11505 /*
11506 * Register, memory.
11507 */
11508 IEM_MC_BEGIN(4, 3);
11509 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11510 IEM_MC_LOCAL(X86XMMREG, Dst);
11511 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11512 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11513 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11515
11516 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11517 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11518 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11520 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11521 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11522
11523 IEM_MC_PREPARE_SSE_USAGE();
11524 IEM_MC_REF_MXCSR(pfMxcsr);
11525 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11526 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11527 IEM_MC_IF_MXCSR_XCPT_PENDING()
11528 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11529 IEM_MC_ELSE()
11530 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11531 IEM_MC_ENDIF();
11532
11533 IEM_MC_ADVANCE_RIP();
11534 IEM_MC_END();
11535 }
11536 return VINF_SUCCESS;
11537}
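
/*
 * A per-lane sketch of the CMPPS comparison performed by the worker above
 * (illustration only; cmppsLane is a hypothetical helper and only three of
 * the eight predicates are spelled out): each 32-bit lane becomes an
 * all-ones or all-zeroes mask depending on the imm8-selected predicate.
 */
#if 0 /* illustration only */
#include <stdint.h>

static uint32_t cmppsLane(float r32Src1, float r32Src2, uint8_t bImm)
{
    int fResult;
    switch (bImm & 7)
    {
        case 0:  fResult = r32Src1 == r32Src2; break;   /* EQ */
        case 1:  fResult = r32Src1 <  r32Src2; break;   /* LT */
        case 2:  fResult = r32Src1 <= r32Src2; break;   /* LE */
        default: fResult = 0; break;    /* UNORD, NEQ, NLT, NLE, ORD omitted */
    }
    return fResult ? UINT32_MAX : 0;    /* all-ones mask on true */
}
#endif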
11538
11539
11540/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11541FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11542{
11543 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11544
11545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11546 if (IEM_IS_MODRM_REG_MODE(bRm))
11547 {
11548 /*
11549 * Register, register.
11550 */
11551 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11553 IEM_MC_BEGIN(4, 2);
11554 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11555 IEM_MC_LOCAL(X86XMMREG, Dst);
11556 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11557 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11558 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11559 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11560 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11561 IEM_MC_PREPARE_SSE_USAGE();
11562 IEM_MC_REF_MXCSR(pfMxcsr);
11563 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11564 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11565 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11566 IEM_MC_IF_MXCSR_XCPT_PENDING()
11567 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11568 IEM_MC_ELSE()
11569 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11570 IEM_MC_ENDIF();
11571
11572 IEM_MC_ADVANCE_RIP();
11573 IEM_MC_END();
11574 }
11575 else
11576 {
11577 /*
11578 * Register, memory.
11579 */
11580 IEM_MC_BEGIN(4, 3);
11581 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11582 IEM_MC_LOCAL(X86XMMREG, Dst);
11583 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11584 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11585 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11587
11588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11589 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11590 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11592 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11593 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11594
11595 IEM_MC_PREPARE_SSE_USAGE();
11596 IEM_MC_REF_MXCSR(pfMxcsr);
11597 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11598 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11599 IEM_MC_IF_MXCSR_XCPT_PENDING()
11600 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11601 IEM_MC_ELSE()
11602 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11603 IEM_MC_ENDIF();
11604
11605 IEM_MC_ADVANCE_RIP();
11606 IEM_MC_END();
11607 }
11608 return VINF_SUCCESS;
11609}
11610
11611
11612/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11613FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11614{
11615 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11616
11617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11618 if (IEM_IS_MODRM_REG_MODE(bRm))
11619 {
11620 /*
11621 * Register, register.
11622 */
11623 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11625 IEM_MC_BEGIN(4, 2);
11626 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11627 IEM_MC_LOCAL(X86XMMREG, Dst);
11628 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11629 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11630 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11631 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11632 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11633 IEM_MC_PREPARE_SSE_USAGE();
11634 IEM_MC_REF_MXCSR(pfMxcsr);
11635 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11636 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11637 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11638 IEM_MC_IF_MXCSR_XCPT_PENDING()
11639 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11640 IEM_MC_ELSE()
11641 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11642 IEM_MC_ENDIF();
11643
11644 IEM_MC_ADVANCE_RIP();
11645 IEM_MC_END();
11646 }
11647 else
11648 {
11649 /*
11650 * Register, memory.
11651 */
11652 IEM_MC_BEGIN(4, 3);
11653 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11654 IEM_MC_LOCAL(X86XMMREG, Dst);
11655 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11656 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11657 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11659
11660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11661 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11662 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11665 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11666
11667 IEM_MC_PREPARE_SSE_USAGE();
11668 IEM_MC_REF_MXCSR(pfMxcsr);
11669 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11670 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11671 IEM_MC_IF_MXCSR_XCPT_PENDING()
11672 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11673 IEM_MC_ELSE()
11674 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11675 IEM_MC_ENDIF();
11676
11677 IEM_MC_ADVANCE_RIP();
11678 IEM_MC_END();
11679 }
11680 return VINF_SUCCESS;
11681}
11682
11683
11684/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11685FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11686{
11687 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11688
11689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11690 if (IEM_IS_MODRM_REG_MODE(bRm))
11691 {
11692 /*
11693 * Register, register.
11694 */
11695 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11697 IEM_MC_BEGIN(4, 2);
11698 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11699 IEM_MC_LOCAL(X86XMMREG, Dst);
11700 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11701 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11702 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11703 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11704 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11705 IEM_MC_PREPARE_SSE_USAGE();
11706 IEM_MC_REF_MXCSR(pfMxcsr);
11707 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11708 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11709 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11710 IEM_MC_IF_MXCSR_XCPT_PENDING()
11711 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11712 IEM_MC_ELSE()
11713 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11714 IEM_MC_ENDIF();
11715
11716 IEM_MC_ADVANCE_RIP();
11717 IEM_MC_END();
11718 }
11719 else
11720 {
11721 /*
11722 * Register, memory.
11723 */
11724 IEM_MC_BEGIN(4, 3);
11725 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11726 IEM_MC_LOCAL(X86XMMREG, Dst);
11727 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11728 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11729 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11731
11732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11733 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11734 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11736 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11737 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11738
11739 IEM_MC_PREPARE_SSE_USAGE();
11740 IEM_MC_REF_MXCSR(pfMxcsr);
11741 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11742 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11743 IEM_MC_IF_MXCSR_XCPT_PENDING()
11744 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11745 IEM_MC_ELSE()
11746 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11747 IEM_MC_ENDIF();
11748
11749 IEM_MC_ADVANCE_RIP();
11750 IEM_MC_END();
11751 }
11752 return VINF_SUCCESS;
11753}
11754
11755
11756/** Opcode 0x0f 0xc3. */
11757FNIEMOP_DEF(iemOp_movnti_My_Gy)
11758{
11759 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11760
11761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11762
11763 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11764 if (IEM_IS_MODRM_MEM_MODE(bRm))
11765 {
11766 switch (pVCpu->iem.s.enmEffOpSize)
11767 {
11768 case IEMMODE_32BIT:
11769 IEM_MC_BEGIN(0, 2);
11770 IEM_MC_LOCAL(uint32_t, u32Value);
11771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11772
11773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11775 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11776 return IEMOP_RAISE_INVALID_OPCODE();
11777
11778 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11779 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11780 IEM_MC_ADVANCE_RIP();
11781 IEM_MC_END();
11782 break;
11783
11784 case IEMMODE_64BIT:
11785 IEM_MC_BEGIN(0, 2);
11786 IEM_MC_LOCAL(uint64_t, u64Value);
11787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11788
11789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11791 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11792 return IEMOP_RAISE_INVALID_OPCODE();
11793
11794 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11795 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11796 IEM_MC_ADVANCE_RIP();
11797 IEM_MC_END();
11798 break;
11799
11800 case IEMMODE_16BIT:
11801 /** @todo check this form. */
11802 return IEMOP_RAISE_INVALID_OPCODE();
11803 }
11804 }
11805 else
11806 return IEMOP_RAISE_INVALID_OPCODE();
11807 return VINF_SUCCESS;
11808}
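
/*
 * From C code, MOVNTI corresponds to the SSE2 streaming-store intrinsic
 * (illustration only, assuming <emmintrin.h> is available): a plain 32-bit
 * store carrying a cache-bypassing hint.  The emulation above implements it
 * as a normal store, which is acceptable since the non-temporal hint only
 * affects caching behaviour, not the architectural result.
 */
#if 0 /* illustration only */
#include <emmintrin.h>

static void movntiDemo(int *pi32Dst, int i32Value)
{
    _mm_stream_si32(pi32Dst, i32Value); /* movnti [pi32Dst], i32Value */
}
#endif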
11809
11810
11811/* Opcode 0x66 0x0f 0xc3 - invalid */
11812/* Opcode 0xf3 0x0f 0xc3 - invalid */
11813/* Opcode 0xf2 0x0f 0xc3 - invalid */
11814
11815
11816/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11817FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11818{
11819 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11820 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11821 if (IEM_IS_MODRM_REG_MODE(bRm))
11822 {
11823 /*
11824 * Register, register.
11825 */
11826 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11828 IEM_MC_BEGIN(3, 0);
11829 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11830 IEM_MC_ARG(uint16_t, u16Src, 1);
11831 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11832 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11833 IEM_MC_PREPARE_FPU_USAGE();
11834 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11835 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11836 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11837 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11838 IEM_MC_FPU_TO_MMX_MODE();
11839 IEM_MC_ADVANCE_RIP();
11840 IEM_MC_END();
11841 }
11842 else
11843 {
11844 /*
11845 * Register, memory.
11846 */
11847 IEM_MC_BEGIN(3, 2);
11848 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11849 IEM_MC_ARG(uint16_t, u16Src, 1);
11850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11851
11852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11853 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11854 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11856 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11857 IEM_MC_PREPARE_FPU_USAGE();
11858
11859 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11860 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11861 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11862 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11863 IEM_MC_FPU_TO_MMX_MODE();
11864 IEM_MC_ADVANCE_RIP();
11865 IEM_MC_END();
11866 }
11867 return VINF_SUCCESS;
11868}
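
/*
 * A minimal C model of the MMX PINSRW above (illustration only; pinsrwU64
 * is a hypothetical helper): the low word of the source replaces one of
 * the four words of the destination, selected by imm8 & 3.
 */
#if 0 /* illustration only */
#include <stdint.h>

static uint64_t pinsrwU64(uint64_t u64Dst, uint16_t u16Src, uint8_t bImm)
{
    unsigned const iWord = bImm & 3;                        /* word index 0..3 */
    u64Dst &= ~((uint64_t)UINT16_MAX << (iWord * 16));      /* clear the slot */
    return u64Dst | ((uint64_t)u16Src << (iWord * 16));     /* insert the word */
}
#endif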
11869
11870
11871/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11872FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11873{
11874 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11875 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11876 if (IEM_IS_MODRM_REG_MODE(bRm))
11877 {
11878 /*
11879 * Register, register.
11880 */
11881 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11883 IEM_MC_BEGIN(3, 0);
11884 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11885 IEM_MC_ARG(uint16_t, u16Src, 1);
11886 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11887 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11888 IEM_MC_PREPARE_SSE_USAGE();
11889 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11890 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11891 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11892 IEM_MC_ADVANCE_RIP();
11893 IEM_MC_END();
11894 }
11895 else
11896 {
11897 /*
11898 * Register, memory.
11899 */
11900 IEM_MC_BEGIN(3, 2);
11901 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11902 IEM_MC_ARG(uint16_t, u16Src, 1);
11903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11904
11905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11906 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11907 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11909 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11910 IEM_MC_PREPARE_SSE_USAGE();
11911
11912 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11913 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11914 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11915 IEM_MC_ADVANCE_RIP();
11916 IEM_MC_END();
11917 }
11918 return VINF_SUCCESS;
11919}
11920
11921
11922/* Opcode 0xf3 0x0f 0xc4 - invalid */
11923/* Opcode 0xf2 0x0f 0xc4 - invalid */
11924
11925
11926/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11927FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11928{
11929 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);*/ /** @todo */
11930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11931 if (IEM_IS_MODRM_REG_MODE(bRm))
11932 {
11933 /*
11934 * Register, register.
11935 */
11936 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11938 IEM_MC_BEGIN(3, 1);
11939 IEM_MC_LOCAL(uint16_t, u16Dst);
11940 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11941 IEM_MC_ARG(uint64_t, u64Src, 1);
11942 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11943 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11944 IEM_MC_PREPARE_FPU_USAGE();
11945 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11946 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bEvilArg);
11947 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11948 IEM_MC_FPU_TO_MMX_MODE();
11949 IEM_MC_ADVANCE_RIP();
11950 IEM_MC_END();
11951 return VINF_SUCCESS;
11952 }
11953
11954 /* No memory operand. */
11955 return IEMOP_RAISE_INVALID_OPCODE();
11956}
11957
11958
11959/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11960FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11961{
11962 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11964 if (IEM_IS_MODRM_REG_MODE(bRm))
11965 {
11966 /*
11967 * Register, register.
11968 */
11969 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11971 IEM_MC_BEGIN(3, 1);
11972 IEM_MC_LOCAL(uint16_t, u16Dst);
11973 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11974 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11975 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11976 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11977 IEM_MC_PREPARE_SSE_USAGE();
11978 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11979 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bEvilArg);
11980 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11981 IEM_MC_ADVANCE_RIP();
11982 IEM_MC_END();
11983 return VINF_SUCCESS;
11984 }
11985
11986 /* No memory operand. */
11987 return IEMOP_RAISE_INVALID_OPCODE();
11988}
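
/*
 * The inverse of PINSRW, as implemented by the two PEXTRW forms above
 * (illustration only; pextrwU64 is a hypothetical helper): the selected
 * word is zero-extended into the 32-bit destination.  The MMX form masks
 * the immediate with 3; the SSE form has eight words and masks with 7.
 */
#if 0 /* illustration only */
#include <stdint.h>

static uint32_t pextrwU64(uint64_t u64Src, uint8_t bImm)
{
    return (uint32_t)(uint16_t)(u64Src >> ((bImm & 3) * 16));
}
#endif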
11989
11990
11991/* Opcode 0xf3 0x0f 0xc5 - invalid */
11992/* Opcode 0xf2 0x0f 0xc5 - invalid */
11993
11994
11995/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
11996FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11997{
11998 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12000 if (IEM_IS_MODRM_REG_MODE(bRm))
12001 {
12002 /*
12003 * Register, register.
12004 */
12005 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
12006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12007 IEM_MC_BEGIN(3, 0);
12008 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12009 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12010 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
12011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12012 IEM_MC_PREPARE_SSE_USAGE();
12013 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12014 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12015 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
12016 IEM_MC_ADVANCE_RIP();
12017 IEM_MC_END();
12018 }
12019 else
12020 {
12021 /*
12022 * Register, memory.
12023 */
12024 IEM_MC_BEGIN(3, 2);
12025 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12026 IEM_MC_LOCAL(RTUINT128U, uSrc);
12027 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12029
12030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12031 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
12032 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
12033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12034 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12035 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12036
12037 IEM_MC_PREPARE_SSE_USAGE();
12038 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12039 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
12040
12041 IEM_MC_ADVANCE_RIP();
12042 IEM_MC_END();
12043 }
12044 return VINF_SUCCESS;
12045}
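
/*
 * A minimal C model of the SHUFPS shuffle above (illustration only;
 * shufpsU128 is a hypothetical helper): the two low result dwords are
 * picked from the destination and the two high ones from the source, each
 * by a 2-bit field of the immediate.
 */
#if 0 /* illustration only */
#include <stdint.h>

static void shufpsU128(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
{
    uint32_t const au32Old[4] = { au32Dst[0], au32Dst[1], au32Dst[2], au32Dst[3] };
    au32Dst[0] = au32Old[(bImm >> 0) & 3];
    au32Dst[1] = au32Old[(bImm >> 2) & 3];
    au32Dst[2] = au32Src[(bImm >> 4) & 3];
    au32Dst[3] = au32Src[(bImm >> 6) & 3];
}
#endif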
12046
12047
12048/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12049FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12050{
12051 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12053 if (IEM_IS_MODRM_REG_MODE(bRm))
12054 {
12055 /*
12056 * Register, register.
12057 */
12058 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
12059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12060 IEM_MC_BEGIN(3, 0);
12061 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12062 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12063 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
12064 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12065 IEM_MC_PREPARE_SSE_USAGE();
12066 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12067 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12068 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
12069 IEM_MC_ADVANCE_RIP();
12070 IEM_MC_END();
12071 }
12072 else
12073 {
12074 /*
12075 * Register, memory.
12076 */
12077 IEM_MC_BEGIN(3, 2);
12078 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12079 IEM_MC_LOCAL(RTUINT128U, uSrc);
12080 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12082
12083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12084 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
12085 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
12086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12087 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12088 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12089
12090 IEM_MC_PREPARE_SSE_USAGE();
12091 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12092 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
12093
12094 IEM_MC_ADVANCE_RIP();
12095 IEM_MC_END();
12096 }
12097 return VINF_SUCCESS;
12098}
12099
12100
12101/* Opcode 0xf3 0x0f 0xc6 - invalid */
12102/* Opcode 0xf2 0x0f 0xc6 - invalid */
12103
12104
12105/** Opcode 0x0f 0xc7 !11/1. */
12106FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12107{
12108 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12109
12110 IEM_MC_BEGIN(4, 3);
12111 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12112 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12113 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12114 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12115 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12116 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12118
12119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12120 IEMOP_HLP_DONE_DECODING();
12121 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12122
12123 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12124 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12125 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12126
12127 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12128 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12129 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12130
12131 IEM_MC_FETCH_EFLAGS(EFlags);
12132 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12133 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12134 else
12135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12136
12137 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
12138 IEM_MC_COMMIT_EFLAGS(EFlags);
12139 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12140 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
12141 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12142 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12143 IEM_MC_ENDIF();
12144 IEM_MC_ADVANCE_RIP();
12145
12146 IEM_MC_END();
12147 return VINF_SUCCESS;
12148}
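
/*
 * The CMPXCHG8B semantics implemented above, with the atomicity and EFLAGS
 * plumbing stripped away (illustration only; cmpxchg8bModel is a
 * hypothetical helper): on a match ECX:EBX is stored and ZF is set,
 * otherwise the memory value is loaded into EDX:EAX, hence the ZF-guarded
 * register write-back above.
 */
#if 0 /* illustration only */
#include <stdint.h>

static int cmpxchg8bModel(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx;   /* store ECX:EBX */
        return 1;               /* ZF=1 */
    }
    *pu64EaxEdx = *pu64Mem;     /* load the old value into EDX:EAX */
    return 0;                   /* ZF=0 */
}
#endif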
12149
12150
12151/** Opcode REX.W 0x0f 0xc7 !11/1. */
12152FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12153{
12154 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12155 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12156 {
12157#if 0
12158 RT_NOREF(bRm);
12159 IEMOP_BITCH_ABOUT_STUB();
12160 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
12161#else
12162 IEM_MC_BEGIN(4, 3);
12163 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12164 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12165 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12166 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12167 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12168 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12170
12171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12172 IEMOP_HLP_DONE_DECODING();
12173 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12174 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12175
12176 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12177 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12178 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12179
12180 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12181 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12182 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12183
12184 IEM_MC_FETCH_EFLAGS(EFlags);
12185# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
12186# if defined(RT_ARCH_AMD64)
12187 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12188# endif
12189 {
12190 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12191 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12192 else
12193 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12194 }
12195# if defined(RT_ARCH_AMD64)
12196 else
12197# endif
12198# endif
12199# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
12200 {
12201 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12202 accesses that are not all atomic, which works fine in a uni-CPU guest
12203 configuration (ignoring DMA). If guest SMP is active we have no choice
12204 but to use a rendezvous callback here. Sigh. */
12205 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12206 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12207 else
12208 {
12209 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12210 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12211 }
12212 }
12213# endif
12214
12215 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12216 IEM_MC_COMMIT_EFLAGS(EFlags);
12217 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12218 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12219 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12220 IEM_MC_ENDIF();
12221 IEM_MC_ADVANCE_RIP();
12222
12223 IEM_MC_END();
12224 return VINF_SUCCESS;
12225#endif
12226 }
12227 Log(("cmpxchg16b -> #UD\n"));
12228 return IEMOP_RAISE_INVALID_OPCODE();
12229}
12230
12231FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12232{
12233 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12234 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12235 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12236}
12237
12238/** Opcode 0x0f 0xc7 11/6. */
12239FNIEMOP_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
12240
12241/** Opcode 0x0f 0xc7 !11/6. */
12242#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12243FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12244{
12245 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12246 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12247 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12248 IEM_MC_BEGIN(2, 0);
12249 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12250 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12252 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12253 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12254 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12255 IEM_MC_END();
12256 return VINF_SUCCESS;
12257}
12258#else
12259FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12260#endif
12261
12262/** Opcode 0x66 0x0f 0xc7 !11/6. */
12263#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12264FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12265{
12266 IEMOP_MNEMONIC(vmclear, "vmclear");
12267 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12268 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12269 IEM_MC_BEGIN(2, 0);
12270 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12271 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12273 IEMOP_HLP_DONE_DECODING();
12274 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12275 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12276 IEM_MC_END();
12277 return VINF_SUCCESS;
12278}
12279#else
12280FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12281#endif
12282
12283/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12284#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12285FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12286{
12287 IEMOP_MNEMONIC(vmxon, "vmxon");
12288 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12289 IEM_MC_BEGIN(2, 0);
12290 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12291 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12293 IEMOP_HLP_DONE_DECODING();
12294 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12295 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12296 IEM_MC_END();
12297 return VINF_SUCCESS;
12298}
12299#else
12300FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12301#endif
12302
12303/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12304#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12305FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12306{
12307 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12308 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12309 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12310 IEM_MC_BEGIN(2, 0);
12311 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12312 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12314 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12315 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12316 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12317 IEM_MC_END();
12318 return VINF_SUCCESS;
12319}
12320#else
12321FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12322#endif
12323
12324/** Opcode 0x0f 0xc7 11/7. */
12325FNIEMOP_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
12326
12327
12328/**
12329 * Group 9 jump table for register variant.
12330 */
12331IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12332{ /* pfx: none, 066h, 0f3h, 0f2h */
12333 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12334 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12335 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12336 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12337 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12338 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12339 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12340 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12341};
12342AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12343
12344
12345/**
12346 * Group 9 jump table for memory variant.
12347 */
12348IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12349{ /* pfx: none, 066h, 0f3h, 0f2h */
12350 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12351 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12352 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12353 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12354 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12355 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12356 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12357 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12358};
12359AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12360
12361
12362/** Opcode 0x0f 0xc7. */
12363FNIEMOP_DEF(iemOp_Grp9)
12364{
12365 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12366 if (IEM_IS_MODRM_REG_MODE(bRm))
12367 /* register, register */
12368 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12369 + pVCpu->iem.s.idxPrefix], bRm);
12370 /* memory, register */
12371 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12372 + pVCpu->iem.s.idxPrefix], bRm);
12373}
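
/*
 * How the dispatch above indexes the two 8x4 tables (illustration only;
 * grp9TableIndex is a hypothetical helper): four prefix columns (none,
 * 0x66, 0xf3, 0xf2) per ModR/M reg value, so e.g. /6 with an 0xf3 prefix
 * in memory mode lands on iemOp_Grp9_vmxon_Mq.
 */
#if 0 /* illustration only */
#include <stdint.h>

static unsigned grp9TableIndex(uint8_t bRm, unsigned idxPrefix)
{
    return ((bRm >> 3) & 7) * 4 + idxPrefix;    /* reg field times four columns */
}
#endif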
12374
12375
12376/**
12377 * Common 'bswap register' helper.
12378 */
12379FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12380{
12381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12382 switch (pVCpu->iem.s.enmEffOpSize)
12383 {
12384 case IEMMODE_16BIT:
12385 IEM_MC_BEGIN(1, 0);
12386 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12387 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12388 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12389 IEM_MC_ADVANCE_RIP();
12390 IEM_MC_END();
12391 return VINF_SUCCESS;
12392
12393 case IEMMODE_32BIT:
12394 IEM_MC_BEGIN(1, 0);
12395 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12396 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12397 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12398 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12399 IEM_MC_ADVANCE_RIP();
12400 IEM_MC_END();
12401 return VINF_SUCCESS;
12402
12403 case IEMMODE_64BIT:
12404 IEM_MC_BEGIN(1, 0);
12405 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12406 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12407 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12408 IEM_MC_ADVANCE_RIP();
12409 IEM_MC_END();
12410 return VINF_SUCCESS;
12411
12412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12413 }
12414}
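

/* Note: The SDM leaves BSWAP with a 16-bit operand undefined; the 16-bit
   case above simply hands the whole 32-bit register to iemAImpl_bswap_u16
   without clearing the high dword, so the exact result is up to that
   worker rather than being specified here. */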
12415
12416
12417/** Opcode 0x0f 0xc8. */
12418FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12419{
12420 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12421 /* Note! The Intel manual states that R8-R15 can be accessed by using a REX.X
12422 prefix. REX.B appears to be the correct prefix. For a parallel
12423 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12424 IEMOP_HLP_MIN_486();
12425 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12426}
12427
12428
12429/** Opcode 0x0f 0xc9. */
12430FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12431{
12432 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12433 IEMOP_HLP_MIN_486();
12434 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12435}
12436
12437
12438/** Opcode 0x0f 0xca. */
12439FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12440{
12441 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12442 IEMOP_HLP_MIN_486();
12443 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12444}
12445
12446
12447/** Opcode 0x0f 0xcb. */
12448FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12449{
12450 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12451 IEMOP_HLP_MIN_486();
12452 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12453}
12454
12455
12456/** Opcode 0x0f 0xcc. */
12457FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12458{
12459 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12460 IEMOP_HLP_MIN_486();
12461 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12462}
12463
12464
12465/** Opcode 0x0f 0xcd. */
12466FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12467{
12468 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12469 IEMOP_HLP_MIN_486();
12470 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12471}
12472
12473
12474/** Opcode 0x0f 0xce. */
12475FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12476{
12477 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12478 IEMOP_HLP_MIN_486();
12479 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12480}
12481
12482
12483/** Opcode 0x0f 0xcf. */
12484FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12485{
12486 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12487 IEMOP_HLP_MIN_486();
12488 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12489}
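

/* Encoding example (illustration only, not taken from the decoder): 0f c8 is
   'bswap eax', while 41 0f c8 sets REX.B and selects r8d instead, per the
   uRexB OR-ing in the handlers above. */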
12490
12491
12492/* Opcode 0x0f 0xd0 - invalid */
12493
12494
12495/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12496FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12497{
12498 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
12499 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12500}
12501
12502
12503/* Opcode 0xf3 0x0f 0xd0 - invalid */
12504
12505
12506/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12507FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12508{
12509 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
12510 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12511}
12512
12513
12514
12515/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12516FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12517{
12518 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12519 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12520}
12521
12522/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12523FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12524{
12525 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12526 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12527}
12528
12529/* Opcode 0xf3 0x0f 0xd1 - invalid */
12530/* Opcode 0xf2 0x0f 0xd1 - invalid */
12531
12532/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12533FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12534{
12535 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12536 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12537}
12538
12539
12540/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12541FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12542{
12543 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12544 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12545}
12546
12547
12548/* Opcode 0xf3 0x0f 0xd2 - invalid */
12549/* Opcode 0xf2 0x0f 0xd2 - invalid */
12550
12551/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12552FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12553{
12554 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12555 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12556}
12557
12558
12559/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12560FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12561{
12562 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12563 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12564}
12565
12566
12567/* Opcode 0xf3 0x0f 0xd3 - invalid */
12568/* Opcode 0xf2 0x0f 0xd3 - invalid */
12569
12570
12571/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12572FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12573{
12574 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12575 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12576}
12577
12578
12579/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12580FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12581{
12582 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12583 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12584}
12585
12586
12587/* Opcode 0xf3 0x0f 0xd4 - invalid */
12588/* Opcode 0xf2 0x0f 0xd4 - invalid */
12589
12590/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12591FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12592{
12593 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12594 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12595}
12596
12597/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12598FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12599{
12600 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12601 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12602}
12603
12604
12605/* Opcode 0xf3 0x0f 0xd5 - invalid */
12606/* Opcode 0xf2 0x0f 0xd5 - invalid */
12607
12608/* Opcode 0x0f 0xd6 - invalid */
12609
12610/**
12611 * @opcode 0xd6
12612 * @oppfx 0x66
12613 * @opcpuid sse2
12614 * @opgroup og_sse2_pcksclr_datamove
12615 * @opxcpttype none
12616 * @optest op1=-1 op2=2 -> op1=2
12617 * @optest op1=0 op2=-42 -> op1=-42
12618 */
12619FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12620{
12621 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12623 if (IEM_IS_MODRM_REG_MODE(bRm))
12624 {
12625 /*
12626 * Register, register.
12627 */
12628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12629 IEM_MC_BEGIN(0, 2);
12630 IEM_MC_LOCAL(uint64_t, uSrc);
12631
12632 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12633 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12634
12635 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12636 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12637
12638 IEM_MC_ADVANCE_RIP();
12639 IEM_MC_END();
12640 }
12641 else
12642 {
12643 /*
12644 * Memory, register.
12645 */
12646 IEM_MC_BEGIN(0, 2);
12647 IEM_MC_LOCAL(uint64_t, uSrc);
12648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12649
12650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12652 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12653 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12654
12655 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12656 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12657
12658 IEM_MC_ADVANCE_RIP();
12659 IEM_MC_END();
12660 }
12661 return VINF_SUCCESS;
12662}
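

/* Note: The register form above zero-extends the destination XMM register to
   128 bits (IEM_MC_STORE_XREG_U64_ZX_U128), whereas the memory form stores
   only the low quadword and leaves memory beyond it untouched. */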
12663
12664
12665/**
12666 * @opcode 0xd6
12667 * @opcodesub 11 mr/reg
12668 * @oppfx f3
12669 * @opcpuid sse2
12670 * @opgroup og_sse2_simdint_datamove
12671 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12672 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12673 */
12674FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12675{
12676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12677 if (IEM_IS_MODRM_REG_MODE(bRm))
12678 {
12679 /*
12680 * Register, register.
12681 */
12682 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12684 IEM_MC_BEGIN(0, 1);
12685 IEM_MC_LOCAL(uint64_t, uSrc);
12686
12687 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12688 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12689 IEM_MC_FPU_TO_MMX_MODE();
12690
12691 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12692 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12693
12694 IEM_MC_ADVANCE_RIP();
12695 IEM_MC_END();
12696 return VINF_SUCCESS;
12697 }
12698
12699 /**
12700 * @opdone
12701 * @opmnemonic udf30fd6mem
12702 * @opcode 0xd6
12703 * @opcodesub !11 mr/reg
12704 * @oppfx f3
12705 * @opunused intel-modrm
12706 * @opcpuid sse
12707 * @optest ->
12708 */
12709 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12710}
12711
12712
12713/**
12714 * @opcode 0xd6
12715 * @opcodesub 11 mr/reg
12716 * @oppfx f2
12717 * @opcpuid sse2
12718 * @opgroup og_sse2_simdint_datamove
12719 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12720 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12721 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12722 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12723 * @optest op1=-42 op2=0xfedcba9876543210
12724 * -> op1=0xfedcba9876543210 ftw=0xff
12725 */
12726FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12727{
12728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12729 if (IEM_IS_MODRM_REG_MODE(bRm))
12730 {
12731 /*
12732 * Register, register.
12733 */
12734 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12736 IEM_MC_BEGIN(0, 1);
12737 IEM_MC_LOCAL(uint64_t, uSrc);
12738
12739 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12740 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12741 IEM_MC_FPU_TO_MMX_MODE();
12742
12743 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12744 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12745
12746 IEM_MC_ADVANCE_RIP();
12747 IEM_MC_END();
12748 return VINF_SUCCESS;
12749 }
12750
12751 /**
12752 * @opdone
12753 * @opmnemonic udf20fd6mem
12754 * @opcode 0xd6
12755 * @opcodesub !11 mr/reg
12756 * @oppfx f2
12757 * @opunused intel-modrm
12758 * @opcpuid sse
12759 * @optest ->
12760 */
12761 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12762}
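

/* Note: Both MOVQ2DQ and MOVDQ2Q above switch the FPU to MMX mode
   (IEM_MC_FPU_TO_MMX_MODE), which is what the ftw=0xff expectations in the
   @optest annotations refer to: the tag word reads as all-valid afterwards. */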
12763
12764
12765/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12766FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12767{
12768 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12769 /* Docs say register only. */
12770 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12771 {
12772 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12773 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
12774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12775 IEM_MC_BEGIN(2, 0);
12776 IEM_MC_ARG(uint64_t *, puDst, 0);
12777 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12778 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12779 IEM_MC_PREPARE_FPU_USAGE();
12780 IEM_MC_FPU_TO_MMX_MODE();
12781
12782 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
12783 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12784 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12785
12786 IEM_MC_ADVANCE_RIP();
12787 IEM_MC_END();
12788 return VINF_SUCCESS;
12789 }
12790 return IEMOP_RAISE_INVALID_OPCODE();
12791}
12792
12793
12794/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12795FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12796{
12797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12798 /* Docs say register only. */
12799 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12800 {
12801 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12802 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
12803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12804 IEM_MC_BEGIN(2, 0);
12805 IEM_MC_ARG(uint64_t *, puDst, 0);
12806 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12807 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12808 IEM_MC_PREPARE_SSE_USAGE();
12809 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12810 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12811 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12812 IEM_MC_ADVANCE_RIP();
12813 IEM_MC_END();
12814 return VINF_SUCCESS;
12815 }
12816 return IEMOP_RAISE_INVALID_OPCODE();
12817}
12818
12819
12820/* Opcode 0xf3 0x0f 0xd7 - invalid */
12821/* Opcode 0xf2 0x0f 0xd7 - invalid */
12822
12823
12824/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12825FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12826{
12827 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12828 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12829}
12830
12831
12832/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12833FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12834{
12835 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12836 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12837}
12838
12839
12840/* Opcode 0xf3 0x0f 0xd8 - invalid */
12841/* Opcode 0xf2 0x0f 0xd8 - invalid */
12842
12843/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12844FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12845{
12846 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12847 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12848}
12849
12850
12851/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12852FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12853{
12854 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12855 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12856}
12857
12858
12859/* Opcode 0xf3 0x0f 0xd9 - invalid */
12860/* Opcode 0xf2 0x0f 0xd9 - invalid */
12861
12862/** Opcode 0x0f 0xda - pminub Pq, Qq */
12863FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12864{
12865 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12866 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12867}
12868
12869
12870/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12871FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12872{
12873 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12874 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12875}
12876
12877/* Opcode 0xf3 0x0f 0xda - invalid */
12878/* Opcode 0xf2 0x0f 0xda - invalid */
12879
12880/** Opcode 0x0f 0xdb - pand Pq, Qq */
12881FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12882{
12883 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12884 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12885}
12886
12887
12888/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12889FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12890{
12891 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12892 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12893}
12894
12895
12896/* Opcode 0xf3 0x0f 0xdb - invalid */
12897/* Opcode 0xf2 0x0f 0xdb - invalid */
12898
12899/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12900FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12901{
12902 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12903 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12904}
12905
12906
12907/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12908FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12909{
12910 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12911 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12912}
12913
12914
12915/* Opcode 0xf3 0x0f 0xdc - invalid */
12916/* Opcode 0xf2 0x0f 0xdc - invalid */
12917
12918/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12919FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12920{
12921 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12922 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12923}
12924
12925
12926/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
12927FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
12928{
12929 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12930 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
12931}
12932
12933
12934/* Opcode 0xf3 0x0f 0xdd - invalid */
12935/* Opcode 0xf2 0x0f 0xdd - invalid */
12936
12937/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
12938FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
12939{
12940 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12941 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
12942}
12943
12944
12945/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
12946FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
12947{
12948 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12949 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
12950}
12951
12952/* Opcode 0xf3 0x0f 0xde - invalid */
12953/* Opcode 0xf2 0x0f 0xde - invalid */
12954
12955
12956/** Opcode 0x0f 0xdf - pandn Pq, Qq */
12957FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
12958{
12959 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12960 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
12961}
12962
12963
12964/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
12965FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
12966{
12967 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12968 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
12969}
12970
12971
12972/* Opcode 0xf3 0x0f 0xdf - invalid */
12973/* Opcode 0xf2 0x0f 0xdf - invalid */
12974
12975/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
12976FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
12977{
12978 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12979 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
12980}
12981
12982
12983/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
12984FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
12985{
12986 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12987 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
12988}
12989
12990
12991/* Opcode 0xf3 0x0f 0xe0 - invalid */
12992/* Opcode 0xf2 0x0f 0xe0 - invalid */
12993
12994/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
12995FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
12996{
12997 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12998 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
12999}
13000
13001
13002/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13003FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13004{
13005 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13006 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13007}
13008
13009
13010/* Opcode 0xf3 0x0f 0xe1 - invalid */
13011/* Opcode 0xf2 0x0f 0xe1 - invalid */
13012
13013/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13014FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13015{
13016 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13017 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13018}
13019
13020
13021/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13022FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13023{
13024 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13025 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13026}
13027
13028
13029/* Opcode 0xf3 0x0f 0xe2 - invalid */
13030/* Opcode 0xf2 0x0f 0xe2 - invalid */
13031
13032/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13033FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13034{
13035 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13036 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13037}
13038
13039
13040/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13041FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13042{
13043 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13044 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13045}
13046
13047
13048/* Opcode 0xf3 0x0f 0xe3 - invalid */
13049/* Opcode 0xf2 0x0f 0xe3 - invalid */
13050
13051/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13052FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13053{
13054 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13055 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13056}
13057
13058
13059/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13060FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13061{
13062 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13063 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13064}
13065
13066
13067/* Opcode 0xf3 0x0f 0xe4 - invalid */
13068/* Opcode 0xf2 0x0f 0xe4 - invalid */
13069
13070/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13071FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13072{
13073 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13074 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13075}
13076
13077
13078/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13079FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13080{
13081 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13082 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13083}
13084
13085
13086/* Opcode 0xf3 0x0f 0xe5 - invalid */
13087/* Opcode 0xf2 0x0f 0xe5 - invalid */
13088/* Opcode 0x0f 0xe6 - invalid */
13089
13090
13091/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13092FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13093{
13094 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13095 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13096}
13097
13098
13099/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13100FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13101{
13102 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13103 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13104}
13105
13106
13107/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13108FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13109{
13110 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13111 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13112}
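

/* Note: CVTTPD2DQ truncates toward zero while CVTPD2DQ uses the current
   MXCSR rounding mode; both narrow two doubles to two dwords in the low
   quadword of the destination. */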
13113
13114
13115/**
13116 * @opcode 0xe7
13117 * @opcodesub !11 mr/reg
13118 * @oppfx none
13119 * @opcpuid sse
13120 * @opgroup og_sse1_cachect
13121 * @opxcpttype none
13122 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13123 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13124 */
13125FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13126{
13127 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13129 if (IEM_IS_MODRM_MEM_MODE(bRm))
13130 {
13131 /* Register, memory. */
13132 IEM_MC_BEGIN(0, 2);
13133 IEM_MC_LOCAL(uint64_t, uSrc);
13134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13135
13136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13138 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13139 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13140 IEM_MC_FPU_TO_MMX_MODE();
13141
13142 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13143 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13144
13145 IEM_MC_ADVANCE_RIP();
13146 IEM_MC_END();
13147 return VINF_SUCCESS;
13148 }
13149 /**
13150 * @opdone
13151 * @opmnemonic ud0fe7reg
13152 * @opcode 0xe7
13153 * @opcodesub 11 mr/reg
13154 * @oppfx none
13155 * @opunused immediate
13156 * @opcpuid sse
13157 * @optest ->
13158 */
13159 return IEMOP_RAISE_INVALID_OPCODE();
13160}
13161
13162/**
13163 * @opcode 0xe7
13164 * @opcodesub !11 mr/reg
13165 * @oppfx 0x66
13166 * @opcpuid sse2
13167 * @opgroup og_sse2_cachect
13168 * @opxcpttype 1
13169 * @optest op1=-1 op2=2 -> op1=2
13170 * @optest op1=0 op2=-42 -> op1=-42
13171 */
13172FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13173{
13174 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13176 if (IEM_IS_MODRM_MEM_MODE(bRm))
13177 {
13178 /* Register, memory. */
13179 IEM_MC_BEGIN(0, 2);
13180 IEM_MC_LOCAL(RTUINT128U, uSrc);
13181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13182
13183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13185 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
13186 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13187
13188 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13189 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13190
13191 IEM_MC_ADVANCE_RIP();
13192 IEM_MC_END();
13193 return VINF_SUCCESS;
13194 }
13195
13196 /**
13197 * @opdone
13198 * @opmnemonic ud660fe7reg
13199 * @opcode 0xe7
13200 * @opcodesub 11 mr/reg
13201 * @oppfx 0x66
13202 * @opunused immediate
13203 * @opcpuid sse
13204 * @optest ->
13205 */
13206 return IEMOP_RAISE_INVALID_OPCODE();
13207}
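
/* Note: The non-temporal hint of MOVNTQ/MOVNTDQ is not modelled here; the
   stores above go through the regular path, though MOVNTDQ still enforces
   16-byte alignment via IEM_MC_STORE_MEM_U128_ALIGN_SSE. */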
13208
13209/* Opcode 0xf3 0x0f 0xe7 - invalid */
13210/* Opcode 0xf2 0x0f 0xe7 - invalid */
13211
13212
13213/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13214FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13215{
13216 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13217 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13218}
13219
13220
13221/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13222FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13223{
13224 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13225 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13226}
13227
13228
13229/* Opcode 0xf3 0x0f 0xe8 - invalid */
13230/* Opcode 0xf2 0x0f 0xe8 - invalid */
13231
13232/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13233FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13234{
13235 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13236 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13237}
13238
13239
13240/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13241FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13242{
13243 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13244 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13245}
13246
13247
13248/* Opcode 0xf3 0x0f 0xe9 - invalid */
13249/* Opcode 0xf2 0x0f 0xe9 - invalid */
13250
13251
13252/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13253FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13254{
13255 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13256 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13257}
13258
13259
13260/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13261FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13262{
13263 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13264 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13265}
13266
13267
13268/* Opcode 0xf3 0x0f 0xea - invalid */
13269/* Opcode 0xf2 0x0f 0xea - invalid */
13270
13271
13272/** Opcode 0x0f 0xeb - por Pq, Qq */
13273FNIEMOP_DEF(iemOp_por_Pq_Qq)
13274{
13275 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13276 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13277}
13278
13279
13280/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13281FNIEMOP_DEF(iemOp_por_Vx_Wx)
13282{
13283 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13284 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13285}
13286
13287
13288/* Opcode 0xf3 0x0f 0xeb - invalid */
13289/* Opcode 0xf2 0x0f 0xeb - invalid */
13290
13291/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13292FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13293{
13294 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13295 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13296}
13297
13298
13299/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13300FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13301{
13302 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13303 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13304}
13305
13306
13307/* Opcode 0xf3 0x0f 0xec - invalid */
13308/* Opcode 0xf2 0x0f 0xec - invalid */
13309
13310/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13311FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13312{
13313 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13314 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13315}
13316
13317
13318/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13319FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13320{
13321 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13322 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13323}
13324
13325
13326/* Opcode 0xf3 0x0f 0xed - invalid */
13327/* Opcode 0xf2 0x0f 0xed - invalid */
13328
13329
13330/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13331FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13332{
13333 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13334 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13335}
13336
13337
13338/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13339FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13340{
13341 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13342 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13343}
13344
13345
13346/* Opcode 0xf3 0x0f 0xee - invalid */
13347/* Opcode 0xf2 0x0f 0xee - invalid */
13348
13349
13350/** Opcode 0x0f 0xef - pxor Pq, Qq */
13351FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13352{
13353 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13354 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13355}
13356
13357
13358/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13359FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13360{
13361 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13362 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13363}
13364
13365
13366/* Opcode 0xf3 0x0f 0xef - invalid */
13367/* Opcode 0xf2 0x0f 0xef - invalid */
13368
13369/* Opcode 0x0f 0xf0 - invalid */
13370/* Opcode 0x66 0x0f 0xf0 - invalid */
13371
13372
13373/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13374FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13375{
13376 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13378 if (IEM_IS_MODRM_REG_MODE(bRm))
13379 {
13380 /*
13381 * Register, register - (not implemented, assuming it raises \#UD).
13382 */
13383 return IEMOP_RAISE_INVALID_OPCODE();
13384 }
13385 else
13386 {
13387 /*
13388 * Register, memory.
13389 */
13390 IEM_MC_BEGIN(0, 2);
13391 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13393
13394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13396 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
13397 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13398 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13399 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13400
13401 IEM_MC_ADVANCE_RIP();
13402 IEM_MC_END();
13403 }
13404 return VINF_SUCCESS;
13405}
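

/* Note: Unlike MOVDQA, the LDDQU load above deliberately skips the 16-byte
   alignment check (IEM_MC_FETCH_MEM_U128 rather than the _ALIGN_SSE variant),
   matching the instruction's unaligned-load semantics. */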
13406
13407
13408/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13409FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13410{
13411 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13412 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13413}
13414
13415
13416/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13417FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13418{
13419 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13420 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13421}
13422
13423
13424/* Opcode 0xf2 0x0f 0xf1 - invalid */
13425
13426/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13427FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13428{
13429 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13430 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13431}
13432
13433
13434/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13435FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13436{
13437 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13438 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13439}
13440
13441
13442/* Opcode 0xf2 0x0f 0xf2 - invalid */
13443
13444/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13445FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13446{
13447 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13448 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13449}
13450
13451
13452/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13453FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13454{
13455 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13456 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13457}
13458
13459/* Opcode 0xf2 0x0f 0xf3 - invalid */
13460
13461/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13462FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13463{
13464 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13465 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pmuludq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13466}
13467
13468
13469/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13470FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13471{
13472 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13473 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13474}
13475
13476
13477/* Opcode 0xf2 0x0f 0xf4 - invalid */
13478
13479/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13480FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13481{
13482 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13483 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13484}
13485
13486
13487/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13488FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13489{
13490 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13491 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13492}
13493
13494/* Opcode 0xf2 0x0f 0xf5 - invalid */
13495
13496/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13497FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13498{
13499 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13500 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13501}
13502
13503
13504/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13505FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13506{
13507 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13508 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13509}
13510
13511
13512/* Opcode 0xf2 0x0f 0xf6 - invalid */
13513
13514/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13515FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13516/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13517FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13518/* Opcode 0xf2 0x0f 0xf7 - invalid */
13519
13520
13521/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13522FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13523{
13524 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13525 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13526}
13527
13528
13529/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13530FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13531{
13532 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13533 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13534}
13535
13536
13537/* Opcode 0xf2 0x0f 0xf8 - invalid */
13538
13539
13540/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13541FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13542{
13543 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13544 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13545}
13546
13547
13548/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13549FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13550{
13551 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13552 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13553}
13554
13555
13556/* Opcode 0xf2 0x0f 0xf9 - invalid */
13557
13558
13559/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13560FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13561{
13562 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13563 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13564}
13565
13566
13567/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13568FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13569{
13570 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13571 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13572}
13573
13574
13575/* Opcode 0xf2 0x0f 0xfa - invalid */
13576
13577
13578/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13579FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13580{
13581 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13582 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13583}
13584
13585
13586/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13587FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13588{
13589 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13590 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13591}
13592
13593
13594/* Opcode 0xf2 0x0f 0xfb - invalid */
13595
13596
13597/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13598FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13599{
13600 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13601 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13602}
13603
13604
13605/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13606FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13607{
13608 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13609 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13610}
13611
13612
13613/* Opcode 0xf2 0x0f 0xfc - invalid */
13614
13615
13616/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13617FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13618{
13619 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13620 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13621}
13622
13623
13624/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13625FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13626{
13627 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13628 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13629}
13630
13631
13632/* Opcode 0xf2 0x0f 0xfd - invalid */
13633
13634
13635/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13636FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13637{
13638 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13639 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13640}
13641
13642
13643/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
13644FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13645{
13646 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13647 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13648}
13649
13650
13651/* Opcode 0xf2 0x0f 0xfe - invalid */
13652
13653
13654/** Opcode **** 0x0f 0xff - UD0 */
13655FNIEMOP_DEF(iemOp_ud0)
13656{
13657 IEMOP_MNEMONIC(ud0, "ud0");
13658 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13659 {
13660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13661#ifndef TST_IEM_CHECK_MC
13662 if (IEM_IS_MODRM_MEM_MODE(bRm))
13663 {
13664 RTGCPTR GCPtrEff;
13665 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13666 if (rcStrict != VINF_SUCCESS)
13667 return rcStrict;
13668 }
13669#endif
13670 IEMOP_HLP_DONE_DECODING();
13671 }
13672 return IEMOP_RAISE_INVALID_OPCODE();
13673}
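

/* Note: The handler above decodes a ModR/M byte (and effective address) for
   UD0 only on Intel, so as emulated here the instruction length differs by
   vendor: non-Intel CPUs raise #UD on the bare two-byte opcode. */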
13674
13675
13676
13677/**
13678 * Two byte opcode map, first byte 0x0f.
13679 *
13680 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13681 * check if it needs updating as well when making changes.
13682 */
13683IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13684{
13685 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13686 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13687 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13688 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13689 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13690 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13691 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13692 /* 0x06 */ IEMOP_X4(iemOp_clts),
13693 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13694 /* 0x08 */ IEMOP_X4(iemOp_invd),
13695 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13696 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13697 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13698 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13699 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13700 /* 0x0e */ IEMOP_X4(iemOp_femms),
13701 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13702
13703 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13704 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13705 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13706 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13707 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13708 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13709 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13710 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13711 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13712 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13713 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13714 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13715 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13716 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13717 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13718 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13719
13720 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13721 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13722 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13723 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13724 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13725 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13726 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13727 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13728 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13729 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13730 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13731 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13732 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13733 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13734 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13735 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13736
13737 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13738 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13739 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13740 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13741 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13742 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13743 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13744 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13745 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13746 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13747 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13748 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13749 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13750 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13751 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13752 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13753
13754 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13755 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13756 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13757 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13758 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13759 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13760 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13761 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13762 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13763 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13764 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13765 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13766 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13767 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13768 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13769 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13770
13771 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13772 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13773 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13774 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13775 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13776 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13777 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13778 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13779 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13780 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13781 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13782 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13783 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13784 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13785 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13786 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13787
13788 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13789 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13790 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13791 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13792 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13793 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13794 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13795 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13796 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13797 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13798 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13799 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13800 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13801 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13802 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13803 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13804
13805 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13806 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13807 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13808 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13809 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13810 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13811 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13812 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13813
13814 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13815 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13816 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13817 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13818 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13819 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13820 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13821 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13822
13823 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13824 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
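
/*
 * Illustrative sketch only (compiled out): how an opcode byte plus the
 * active SIMD prefix select a slot in g_apfnTwoByteMap.  Each opcode byte
 * owns four consecutive entries, one per mandatory-prefix column in the
 * order none, 0x66, 0xF3, 0xF2 -- hence the 256 * 4 == 1024 assertion
 * above.  The helper name and the idxPrefix parameter are assumptions for
 * illustration; the real decoder derives its prefix index elsewhere.
 */
#if 0 /* example only */
static unsigned iemTwoByteMapIndexExample(uint8_t bOpcode, unsigned idxPrefix)
{
    /* idxPrefix: 0 = no SIMD prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. */
    Assert(idxPrefix < 4);
    return (unsigned)bOpcode * 4 + idxPrefix;
}
/* Usage: g_apfnTwoByteMap[iemTwoByteMapIndexExample(0xef, 1)] would resolve
   to iemOp_pxor_Vx_Wx, i.e. 66 0F EF (PXOR xmm, xmm/m128). */
#endif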

/** @} */