VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 96914

Last change on this file since 96914 was 96882, checked in by vboxsync, 2 years ago:

IEM: Only zap the high half of the 64-bit destination register actually used by CMPXCHG, not both possible registers.
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96882 2022-09-26 18:56:21Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
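
/*
 * Illustrative sketch (not part of the original file): a typical opcode
 * handler simply decodes its mnemonic and defers to the common worker above,
 * passing the assembly helper for the concrete instruction.  The helper name
 * below is an assumption following the iemAImpl_<insn>_u64 convention used
 * elsewhere in IEM; the real handlers appear later in this file.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq_example)
{
    IEMOP_MNEMONIC(pcmpeqb_example, "pcmpeqb Pq,Qq");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}
#endif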


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
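
/*
 * Sketch of what a matching 'Opt' assembly helper looks like (assumption:
 * pure bitwise operations have no FPU/MXCSR side effects, so the function
 * receives only the two operands).  The _example suffix marks this as an
 * illustration, not a declaration from the real helper set.
 */
#if 0 /* example only */
IEM_DECL_IMPL_DEF(void, iemAImpl_pxor_u64_example,(uint64_t *puDst, uint64_t const *puSrc))
{
    *puDst ^= *puSrc; /* no FSW/MXCSR updates, hence no state pointer needed */
}
#endif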


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
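
/*
 * Sketch: PAVGB is a typical user of this worker - an MMX-register form that
 * requires either SSE or the AMD MMX extensions, which is exactly what
 * IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT tests.  The helper
 * name is an assumption following IEM naming conventions.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq_example)
{
    IEMOP_MNEMONIC(pavgb_example, "pavgb Pq,Qq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pavgb_u64);
}
#endif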


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
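
/*
 * Sketch: the fSupported flag is typically wired to a guest CPUID feature,
 * e.g. gating the MMX-register form of PMULUDQ on SSE2.  The helper name and
 * feature member below are assumptions following IEM conventions.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq_example)
{
    IEMOP_MNEMONIC(pmuludq_example, "pmuludq Pq,Qq");
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pmuludq_u64,
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}
#endif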


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
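
/*
 * Note (conceptual sketch, not original code): IEM_MC_FETCH_MEM_U128_ALIGN_SSE
 * is what enforces the 16-byte alignment that legacy SSE memory forms require;
 * a misaligned operand raises #GP(0) rather than #AC, roughly:
 */
#if 0 /* conceptual equivalent only */
if (GCPtrEffSrc & 15)
    return iemRaiseGeneralProtectionFault0(pVCpu);
#endif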


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
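
/*
 * Note (conceptual sketch, not original code): IEM_MC_FETCH_MEM_U32_ZX_U64
 * above reads only 32 bits of guest memory and zero-extends them, so the
 * helper always receives a full 64-bit source operand:
 */
#if 0 /* conceptual equivalent only */
uint32_t u32Mem = 0;      /* value read from iEffSeg:GCPtrEffSrc */
uint64_t uSrc   = u32Mem; /* implicit zero extension of the high 32 bits */
#endif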


/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which either the low 64 bits or the full
 * 128 bits may actually be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which either the low 64 bits or the full
 * 128 bits may actually be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
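
/*
 * Sketch: PUNPCKHBW is a typical user of this worker - only the high halves
 * of the two source operands contribute to the result.  The helper name is
 * an assumption following IEM naming conventions.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq_example)
{
    IEMOP_MNEMONIC(punpckhbw_example, "punpckhbw Pq,Qq");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}
#endif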


/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
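
/*
 * Sketch of the PFNIEMAIMPLFPSSEF2U128 helper shape this worker calls: the
 * result and the accumulated MXCSR exception flags travel back through the
 * IEMSSERESULT structure, which IEM_MC_STORE_SSE_RESULT then commits.  The
 * parameter and field names below are assumptions.
 */
#if 0 /* example only */
IEM_DECL_IMPL_DEF(void, iemAImpl_addps_u128_example,(PX86FXSTATE pFpuState, PIEMSSERESULT pResult,
                                                     PCX86XMMREG puSrc1, PCX86XMMREG puSrc2))
{
    /* Compute pResult->uResult from *puSrc1 and *puSrc2 honouring the
       rounding and masking bits in pFpuState->MXCSR, and OR any raised
       exception flags into pResult->MXCSR. */
}
#endif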


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * No alignment is enforced on the 32-bit memory operand (scalar form).
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
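
/*
 * Sketch: ADDSS is the archetypal user of this scalar worker - only the low
 * 32-bit lane is computed and the remaining lanes of xmm1 pass through
 * unchanged.  The helper name is an assumption following IEM conventions.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_addss_Vss_Wss_example)
{
    IEMOP_MNEMONIC(addss_example, "addss Vss,Wss");
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}
#endif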


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs    xmm1, xmm2/mem64
 *
 * No alignment is enforced on the 64-bit memory operand (scalar form).
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
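
/*
 * Sketch: the horizontal SSE3 additions map straight onto this worker, e.g.
 * HADDPS.  The helper name is an assumption following IEM conventions.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps_example)
{
    IEMOP_MNEMONIC(haddps_example, "haddps Vps,Wps");
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}
#endif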


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR and VERW (0x0f 0x00 /4 and /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
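
/*
 * Decode note (illustrative): the reg field (bits 5:3) of the ModR/M byte
 * selects the group member.  E.g. 0F 00 D3 has bRm = 0xd3 = 11 010 011b, so
 * reg=2 dispatches to iemOp_Grp6_lldt and rm=3 names the BX/EBX/RBX register
 * as the operand.
 */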


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used; if a hypercall
       isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used; if a hypercall
       isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1697
1698
1699/** Opcode 0x0f 0x01 /7. */
1700FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1701{
1702 IEMOP_MNEMONIC(invlpg, "invlpg");
1703 IEMOP_HLP_MIN_486();
1704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1705 IEM_MC_BEGIN(1, 1);
1706 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1708 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1709 IEM_MC_END();
1710 return VINF_SUCCESS;
1711}
1712
1713
1714/** Opcode 0x0f 0x01 /7. */
1715FNIEMOP_DEF(iemOp_Grp7_swapgs)
1716{
1717 IEMOP_MNEMONIC(swapgs, "swapgs");
1718 IEMOP_HLP_ONLY_64BIT();
1719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1720 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
1721}
1722
1723
1724/** Opcode 0x0f 0x01 /7. */
1725FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1726{
1727 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1729 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
1730}
1731
1732
1733/**
1734 * Group 7 jump table, memory variant.
1735 */
1736IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1737{
1738 iemOp_Grp7_sgdt,
1739 iemOp_Grp7_sidt,
1740 iemOp_Grp7_lgdt,
1741 iemOp_Grp7_lidt,
1742 iemOp_Grp7_smsw,
1743 iemOp_InvalidWithRM,
1744 iemOp_Grp7_lmsw,
1745 iemOp_Grp7_invlpg
1746};
1747
1748
1749/** Opcode 0x0f 0x01. */
1750FNIEMOP_DEF(iemOp_Grp7)
1751{
1752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1753 if (IEM_IS_MODRM_MEM_MODE(bRm))
1754 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1755
1756 switch (IEM_GET_MODRM_REG_8(bRm))
1757 {
1758 case 0:
1759 switch (IEM_GET_MODRM_RM_8(bRm))
1760 {
1761 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1762 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1763 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1764 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1765 }
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767
1768 case 1:
1769 switch (IEM_GET_MODRM_RM_8(bRm))
1770 {
1771 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1772 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1773 }
1774 return IEMOP_RAISE_INVALID_OPCODE();
1775
1776 case 2:
1777 switch (IEM_GET_MODRM_RM_8(bRm))
1778 {
1779 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1780 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1781 }
1782 return IEMOP_RAISE_INVALID_OPCODE();
1783
1784 case 3:
1785 switch (IEM_GET_MODRM_RM_8(bRm))
1786 {
1787 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1788 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1789 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1790 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1791 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1792 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1793 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1794 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1796 }
1797
1798 case 4:
1799 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1800
1801 case 5:
1802 return IEMOP_RAISE_INVALID_OPCODE();
1803
1804 case 6:
1805 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1806
1807 case 7:
1808 switch (IEM_GET_MODRM_RM_8(bRm))
1809 {
1810 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1811 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1812 }
1813 return IEMOP_RAISE_INVALID_OPCODE();
1814
1815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1816 }
1817}
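
/* Note: Group 7 dispatches on the reg field alone for memory operands, but on
   both the reg and r/m fields when mod=3.  E.g. 0x0f 0x01 0xf8 is
   mod=3/reg=7/rm=0 and lands on swapgs above, while 0x0f 0x01 0x38
   (mod=0, reg=7) has a memory operand and reaches invlpg via g_apfnGroup7Mem. */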
1818
1819/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
1820FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1821{
1822 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1824
1825 if (IEM_IS_MODRM_REG_MODE(bRm))
1826 {
1827 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1828 switch (pVCpu->iem.s.enmEffOpSize)
1829 {
1830 case IEMMODE_16BIT:
1831 {
1832 IEM_MC_BEGIN(3, 0);
1833 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1834 IEM_MC_ARG(uint16_t, u16Sel, 1);
1835 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1836
1837 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1838 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1839 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1840
1841 IEM_MC_END();
1842 return VINF_SUCCESS;
1843 }
1844
1845 case IEMMODE_32BIT:
1846 case IEMMODE_64BIT:
1847 {
1848 IEM_MC_BEGIN(3, 0);
1849 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1850 IEM_MC_ARG(uint16_t, u16Sel, 1);
1851 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1852
1853 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1854 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1855 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1856
1857 IEM_MC_END();
1858 return VINF_SUCCESS;
1859 }
1860
1861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1862 }
1863 }
1864 else
1865 {
1866 switch (pVCpu->iem.s.enmEffOpSize)
1867 {
1868 case IEMMODE_16BIT:
1869 {
1870 IEM_MC_BEGIN(3, 1);
1871 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1872 IEM_MC_ARG(uint16_t, u16Sel, 1);
1873 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1875
1876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1877 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1878
1879 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1880 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1881 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1882
1883 IEM_MC_END();
1884 return VINF_SUCCESS;
1885 }
1886
1887 case IEMMODE_32BIT:
1888 case IEMMODE_64BIT:
1889 {
1890 IEM_MC_BEGIN(3, 1);
1891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1892 IEM_MC_ARG(uint16_t, u16Sel, 1);
1893 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1895
1896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1897 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1898/** @todo testcase: make sure it's a 16-bit read. */
1899
1900 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1901 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1902 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1903
1904 IEM_MC_END();
1905 return VINF_SUCCESS;
1906 }
1907
1908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1909 }
1910 }
1911}
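
/* Note: LAR and LSL share this worker because they differ only in which part
   of the descriptor they return (access rights vs. segment limit).  EFLAGS.ZF
   is set on success and cleared on failure by the iemCImpl_LarLsl_u16/_u64
   workers, and on failure the destination register is left unmodified. */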
1912
1913
1914
1915/** Opcode 0x0f 0x02. */
1916FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1917{
1918 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1919 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1920}
1921
1922
1923/** Opcode 0x0f 0x03. */
1924FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1925{
1926 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1927 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1928}
1929
1930
1931/** Opcode 0x0f 0x05. */
1932FNIEMOP_DEF(iemOp_syscall)
1933{
1934 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1936 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1937}
1938
1939
1940/** Opcode 0x0f 0x06. */
1941FNIEMOP_DEF(iemOp_clts)
1942{
1943 IEMOP_MNEMONIC(clts, "clts");
1944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1945 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1946}
1947
1948
1949/** Opcode 0x0f 0x07. */
1950FNIEMOP_DEF(iemOp_sysret)
1951{
1952 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1954 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1955}
1956
1957
1958/** Opcode 0x0f 0x08. */
1959FNIEMOP_DEF(iemOp_invd)
1960{
1961 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1962 IEMOP_HLP_MIN_486();
1963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1964 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
1965}
1966
1967
1968/** Opcode 0x0f 0x09. */
1969FNIEMOP_DEF(iemOp_wbinvd)
1970{
1971 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1972 IEMOP_HLP_MIN_486();
1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1974 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
1975}
1976
1977
1978/** Opcode 0x0f 0x0b. */
1979FNIEMOP_DEF(iemOp_ud2)
1980{
1981 IEMOP_MNEMONIC(ud2, "ud2");
1982 return IEMOP_RAISE_INVALID_OPCODE();
1983}
1984
1985/** Opcode 0x0f 0x0d. */
1986FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1987{
1988 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1989 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1990 {
1991 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1992 return IEMOP_RAISE_INVALID_OPCODE();
1993 }
1994
1995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1996 if (IEM_IS_MODRM_REG_MODE(bRm))
1997 {
1998 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1999 return IEMOP_RAISE_INVALID_OPCODE();
2000 }
2001
2002 switch (IEM_GET_MODRM_REG_8(bRm))
2003 {
2004 case 2: /* Aliased to /0 for the time being. */
2005 case 4: /* Aliased to /0 for the time being. */
2006 case 5: /* Aliased to /0 for the time being. */
2007 case 6: /* Aliased to /0 for the time being. */
2008 case 7: /* Aliased to /0 for the time being. */
2009 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2010 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2011 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2013 }
2014
2015 IEM_MC_BEGIN(0, 1);
2016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2019 /* Currently a NOP. */
2020 NOREF(GCPtrEffSrc);
2021 IEM_MC_ADVANCE_RIP();
2022 IEM_MC_END();
2023 return VINF_SUCCESS;
2024}
2025
2026
2027/** Opcode 0x0f 0x0e. */
2028FNIEMOP_DEF(iemOp_femms)
2029{
2030 IEMOP_MNEMONIC(femms, "femms");
2031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2032
2033 IEM_MC_BEGIN(0,0);
2034 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2035 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2036 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2037 IEM_MC_FPU_FROM_MMX_MODE();
2038 IEM_MC_ADVANCE_RIP();
2039 IEM_MC_END();
2040 return VINF_SUCCESS;
2041}
2042
2043
2044/** Opcode 0x0f 0x0f. */
2045FNIEMOP_DEF(iemOp_3Dnow)
2046{
2047 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2048 {
2049 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2050 return IEMOP_RAISE_INVALID_OPCODE();
2051 }
2052
2053#ifdef IEM_WITH_3DNOW
2054 /* This is pretty sparse, use switch instead of table. */
2055 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2056 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2057#else
2058 IEMOP_BITCH_ABOUT_STUB();
2059 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2060#endif
2061}
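
/* Note: 3DNow! uses a suffix-opcode format, 0x0f 0x0f [ModRM] [SIB] [disp]
   imm8, where the imm8 picks the actual operation.  The populated values are
   scattered thinly over that byte, hence the switch-based dispatcher rather
   than a 256-entry function table. */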
2062
2063
2064/**
2065 * @opcode 0x10
2066 * @oppfx none
2067 * @opcpuid sse
2068 * @opgroup og_sse_simdfp_datamove
2069 * @opxcpttype 4UA
2070 * @optest op1=1 op2=2 -> op1=2
2071 * @optest op1=0 op2=-22 -> op1=-22
2072 */
2073FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2074{
2075 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2077 if (IEM_IS_MODRM_REG_MODE(bRm))
2078 {
2079 /*
2080 * Register, register.
2081 */
2082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2083 IEM_MC_BEGIN(0, 0);
2084 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2085 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2086 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2087 IEM_GET_MODRM_RM(pVCpu, bRm));
2088 IEM_MC_ADVANCE_RIP();
2089 IEM_MC_END();
2090 }
2091 else
2092 {
2093 /*
2094 * Register, memory.
2095 */
2096 IEM_MC_BEGIN(0, 2);
2097 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2104
2105 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2106 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2107
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 return VINF_SUCCESS;
2113}
2114
2115
2116/**
2117 * @opcode 0x10
2118 * @oppfx 0x66
2119 * @opcpuid sse2
2120 * @opgroup og_sse2_pcksclr_datamove
2121 * @opxcpttype 4UA
2122 * @optest op1=1 op2=2 -> op1=2
2123 * @optest op1=0 op2=-42 -> op1=-42
2124 */
2125FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2126{
2127 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2129 if (IEM_IS_MODRM_REG_MODE(bRm))
2130 {
2131 /*
2132 * Register, register.
2133 */
2134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2135 IEM_MC_BEGIN(0, 0);
2136 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2137 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2138 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2139 IEM_GET_MODRM_RM(pVCpu, bRm));
2140 IEM_MC_ADVANCE_RIP();
2141 IEM_MC_END();
2142 }
2143 else
2144 {
2145 /*
2146 * Register, memory.
2147 */
2148 IEM_MC_BEGIN(0, 2);
2149 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2151
2152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2154 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2155 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2156
2157 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2158 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2159
2160 IEM_MC_ADVANCE_RIP();
2161 IEM_MC_END();
2162 }
2163 return VINF_SUCCESS;
2164}
2165
2166
2167/**
2168 * @opcode 0x10
2169 * @oppfx 0xf3
2170 * @opcpuid sse
2171 * @opgroup og_sse_simdfp_datamove
2172 * @opxcpttype 5
2173 * @optest op1=1 op2=2 -> op1=2
2174 * @optest op1=0 op2=-22 -> op1=-22
2175 */
2176FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2177{
2178 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2180 if (IEM_IS_MODRM_REG_MODE(bRm))
2181 {
2182 /*
2183 * Register, register.
2184 */
2185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2186 IEM_MC_BEGIN(0, 1);
2187 IEM_MC_LOCAL(uint32_t, uSrc);
2188
2189 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2190 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2191 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2192 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2193
2194 IEM_MC_ADVANCE_RIP();
2195 IEM_MC_END();
2196 }
2197 else
2198 {
2199 /*
2200 * Register, memory.
2201 */
2202 IEM_MC_BEGIN(0, 2);
2203 IEM_MC_LOCAL(uint32_t, uSrc);
2204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2205
2206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2208 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2209 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2210
2211 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2212 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2213
2214 IEM_MC_ADVANCE_RIP();
2215 IEM_MC_END();
2216 }
2217 return VINF_SUCCESS;
2218}
2219
2220
2221/**
2222 * @opcode 0x10
2223 * @oppfx 0xf2
2224 * @opcpuid sse2
2225 * @opgroup og_sse2_pcksclr_datamove
2226 * @opxcpttype 5
2227 * @optest op1=1 op2=2 -> op1=2
2228 * @optest op1=0 op2=-42 -> op1=-42
2229 */
2230FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2231{
2232 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 if (IEM_IS_MODRM_REG_MODE(bRm))
2235 {
2236 /*
2237 * Register, register.
2238 */
2239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2240 IEM_MC_BEGIN(0, 1);
2241 IEM_MC_LOCAL(uint64_t, uSrc);
2242
2243 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2244 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2245 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2246 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2247
2248 IEM_MC_ADVANCE_RIP();
2249 IEM_MC_END();
2250 }
2251 else
2252 {
2253 /*
2254 * Register, memory.
2255 */
2256 IEM_MC_BEGIN(0, 2);
2257 IEM_MC_LOCAL(uint64_t, uSrc);
2258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2259
2260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2263 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2264
2265 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2266 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2267
2268 IEM_MC_ADVANCE_RIP();
2269 IEM_MC_END();
2270 }
2271 return VINF_SUCCESS;
2272}
2273
2274
2275/**
2276 * @opcode 0x11
2277 * @oppfx none
2278 * @opcpuid sse
2279 * @opgroup og_sse_simdfp_datamove
2280 * @opxcpttype 4UA
2281 * @optest op1=1 op2=2 -> op1=2
2282 * @optest op1=0 op2=-42 -> op1=-42
2283 */
2284FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2285{
2286 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2288 if (IEM_IS_MODRM_REG_MODE(bRm))
2289 {
2290 /*
2291 * Register, register.
2292 */
2293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2294 IEM_MC_BEGIN(0, 0);
2295 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2296 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2297 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2298 IEM_GET_MODRM_REG(pVCpu, bRm));
2299 IEM_MC_ADVANCE_RIP();
2300 IEM_MC_END();
2301 }
2302 else
2303 {
2304 /*
2305 * Memory, register.
2306 */
2307 IEM_MC_BEGIN(0, 2);
2308 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2310
2311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2314 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2315
2316 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2317 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2318
2319 IEM_MC_ADVANCE_RIP();
2320 IEM_MC_END();
2321 }
2322 return VINF_SUCCESS;
2323}
2324
2325
2326/**
2327 * @opcode 0x11
2328 * @oppfx 0x66
2329 * @opcpuid sse2
2330 * @opgroup og_sse2_pcksclr_datamove
2331 * @opxcpttype 4UA
2332 * @optest op1=1 op2=2 -> op1=2
2333 * @optest op1=0 op2=-42 -> op1=-42
2334 */
2335FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2336{
2337 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2339 if (IEM_IS_MODRM_REG_MODE(bRm))
2340 {
2341 /*
2342 * Register, register.
2343 */
2344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2345 IEM_MC_BEGIN(0, 0);
2346 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2348 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2349 IEM_GET_MODRM_REG(pVCpu, bRm));
2350 IEM_MC_ADVANCE_RIP();
2351 IEM_MC_END();
2352 }
2353 else
2354 {
2355 /*
2356 * Memory, register.
2357 */
2358 IEM_MC_BEGIN(0, 2);
2359 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2361
2362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2366
2367 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2368 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2369
2370 IEM_MC_ADVANCE_RIP();
2371 IEM_MC_END();
2372 }
2373 return VINF_SUCCESS;
2374}
2375
2376
2377/**
2378 * @opcode 0x11
2379 * @oppfx 0xf3
2380 * @opcpuid sse
2381 * @opgroup og_sse_simdfp_datamove
2382 * @opxcpttype 5
2383 * @optest op1=1 op2=2 -> op1=2
2384 * @optest op1=0 op2=-22 -> op1=-22
2385 */
2386FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2387{
2388 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2390 if (IEM_IS_MODRM_REG_MODE(bRm))
2391 {
2392 /*
2393 * Register, register.
2394 */
2395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2396 IEM_MC_BEGIN(0, 1);
2397 IEM_MC_LOCAL(uint32_t, uSrc);
2398
2399 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2401 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2402 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2403
2404 IEM_MC_ADVANCE_RIP();
2405 IEM_MC_END();
2406 }
2407 else
2408 {
2409 /*
2410 * Memory, register.
2411 */
2412 IEM_MC_BEGIN(0, 2);
2413 IEM_MC_LOCAL(uint32_t, uSrc);
2414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2415
2416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2418 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2419 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2420
2421 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2422 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2423
2424 IEM_MC_ADVANCE_RIP();
2425 IEM_MC_END();
2426 }
2427 return VINF_SUCCESS;
2428}
2429
2430
2431/**
2432 * @opcode 0x11
2433 * @oppfx 0xf2
2434 * @opcpuid sse2
2435 * @opgroup og_sse2_pcksclr_datamove
2436 * @opxcpttype 5
2437 * @optest op1=1 op2=2 -> op1=2
2438 * @optest op1=0 op2=-42 -> op1=-42
2439 */
2440FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2441{
2442 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2444 if (IEM_IS_MODRM_REG_MODE(bRm))
2445 {
2446 /*
2447 * Register, register.
2448 */
2449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2450 IEM_MC_BEGIN(0, 1);
2451 IEM_MC_LOCAL(uint64_t, uSrc);
2452
2453 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2455 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2456 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2457
2458 IEM_MC_ADVANCE_RIP();
2459 IEM_MC_END();
2460 }
2461 else
2462 {
2463 /*
2464 * Memory, register.
2465 */
2466 IEM_MC_BEGIN(0, 2);
2467 IEM_MC_LOCAL(uint64_t, uSrc);
2468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2469
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2472 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2473 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2474
2475 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2476 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2477
2478 IEM_MC_ADVANCE_RIP();
2479 IEM_MC_END();
2480 }
2481 return VINF_SUCCESS;
2482}
2483
2484
2485FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2486{
2487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2488 if (IEM_IS_MODRM_REG_MODE(bRm))
2489 {
2490 /**
2491 * @opcode 0x12
2492 * @opcodesub 11 mr/reg
2493 * @oppfx none
2494 * @opcpuid sse
2495 * @opgroup og_sse_simdfp_datamove
2496 * @opxcpttype 5
2497 * @optest op1=1 op2=2 -> op1=2
2498 * @optest op1=0 op2=-42 -> op1=-42
2499 */
2500 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2501
2502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2503 IEM_MC_BEGIN(0, 1);
2504 IEM_MC_LOCAL(uint64_t, uSrc);
2505
2506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2507 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2508 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2509 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2510
2511 IEM_MC_ADVANCE_RIP();
2512 IEM_MC_END();
2513 }
2514 else
2515 {
2516 /**
2517 * @opdone
2518 * @opcode 0x12
2519 * @opcodesub !11 mr/reg
2520 * @oppfx none
2521 * @opcpuid sse
2522 * @opgroup og_sse_simdfp_datamove
2523 * @opxcpttype 5
2524 * @optest op1=1 op2=2 -> op1=2
2525 * @optest op1=0 op2=-42 -> op1=-42
2526 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2527 */
2528 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2529
2530 IEM_MC_BEGIN(0, 2);
2531 IEM_MC_LOCAL(uint64_t, uSrc);
2532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2533
2534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2538
2539 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2540 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2541
2542 IEM_MC_ADVANCE_RIP();
2543 IEM_MC_END();
2544 }
2545 return VINF_SUCCESS;
2546}
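
/* Note: With mod=3 this opcode is MOVHLPS, copying the high qword of the
   source into the low qword of the destination:
       movhlps xmm0, xmm1       ; xmm0[63:0] = xmm1[127:64]
   while the memory form is MOVLPS:
       movlps  xmm0, [rax]      ; xmm0[63:0] = mem64
   Both leave the destination's high qword untouched. */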
2547
2548
2549/**
2550 * @opcode 0x12
2551 * @opcodesub !11 mr/reg
2552 * @oppfx 0x66
2553 * @opcpuid sse2
2554 * @opgroup og_sse2_pcksclr_datamove
2555 * @opxcpttype 5
2556 * @optest op1=1 op2=2 -> op1=2
2557 * @optest op1=0 op2=-42 -> op1=-42
2558 */
2559FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2560{
2561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2562 if (IEM_IS_MODRM_MEM_MODE(bRm))
2563 {
2564 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2565
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(uint64_t, uSrc);
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2576 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 return VINF_SUCCESS;
2581 }
2582
2583 /**
2584 * @opdone
2585 * @opmnemonic ud660f12m3
2586 * @opcode 0x12
2587 * @opcodesub 11 mr/reg
2588 * @oppfx 0x66
2589 * @opunused immediate
2590 * @opcpuid sse
2591 * @optest ->
2592 */
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594}
2595
2596
2597/**
2598 * @opcode 0x12
2599 * @oppfx 0xf3
2600 * @opcpuid sse3
2601 * @opgroup og_sse3_pcksclr_datamove
2602 * @opxcpttype 4
2603 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2604 * op1=0x00000002000000020000000100000001
2605 */
2606FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2607{
2608 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2610 if (IEM_IS_MODRM_REG_MODE(bRm))
2611 {
2612 /*
2613 * Register, register.
2614 */
2615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2616 IEM_MC_BEGIN(2, 0);
2617 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2618 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2619
2620 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2621 IEM_MC_PREPARE_SSE_USAGE();
2622
2623 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2624 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2625 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2626
2627 IEM_MC_ADVANCE_RIP();
2628 IEM_MC_END();
2629 }
2630 else
2631 {
2632 /*
2633 * Register, memory.
2634 */
2635 IEM_MC_BEGIN(2, 2);
2636 IEM_MC_LOCAL(RTUINT128U, uSrc);
2637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2638 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2639 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2640
2641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2643 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2644 IEM_MC_PREPARE_SSE_USAGE();
2645
2646 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2647 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2648 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2649
2650 IEM_MC_ADVANCE_RIP();
2651 IEM_MC_END();
2652 }
2653 return VINF_SUCCESS;
2654}
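
/* Note: MOVSLDUP duplicates the even dwords of the source:
       dst[31:0]  = dst[63:32]  = src[31:0]
       dst[95:64] = dst[127:96] = src[95:64]
   which is exactly what the @optest vector above demonstrates. */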
2655
2656
2657/**
2658 * @opcode 0x12
2659 * @oppfx 0xf2
2660 * @opcpuid sse3
2661 * @opgroup og_sse3_pcksclr_datamove
2662 * @opxcpttype 5
2663 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2664 * op1=0x22222222111111112222222211111111
2665 */
2666FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2667{
2668 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2670 if (IEM_IS_MODRM_REG_MODE(bRm))
2671 {
2672 /*
2673 * Register, register.
2674 */
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_BEGIN(2, 0);
2677 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2678 IEM_MC_ARG(uint64_t, uSrc, 1);
2679
2680 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2681 IEM_MC_PREPARE_SSE_USAGE();
2682
2683 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2684 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2685 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2686
2687 IEM_MC_ADVANCE_RIP();
2688 IEM_MC_END();
2689 }
2690 else
2691 {
2692 /*
2693 * Register, memory.
2694 */
2695 IEM_MC_BEGIN(2, 2);
2696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2697 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2698 IEM_MC_ARG(uint64_t, uSrc, 1);
2699
2700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2702 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2703 IEM_MC_PREPARE_SSE_USAGE();
2704
2705 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2706 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2707 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2708
2709 IEM_MC_ADVANCE_RIP();
2710 IEM_MC_END();
2711 }
2712 return VINF_SUCCESS;
2713}
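
/* Note: MOVDDUP broadcasts the low qword, dst[63:0] = dst[127:64] = src[63:0];
   only 64 bits are read from memory, which is why the memory form above uses a
   plain IEM_MC_FETCH_MEM_U64 and the function is annotated @opxcpttype 5
   rather than the alignment-checking type 4. */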
2714
2715
2716/**
2717 * @opcode 0x13
2718 * @opcodesub !11 mr/reg
2719 * @oppfx none
2720 * @opcpuid sse
2721 * @opgroup og_sse_simdfp_datamove
2722 * @opxcpttype 5
2723 * @optest op1=1 op2=2 -> op1=2
2724 * @optest op1=0 op2=-42 -> op1=-42
2725 */
2726FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2727{
2728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2729 if (IEM_IS_MODRM_MEM_MODE(bRm))
2730 {
2731 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2732
2733 IEM_MC_BEGIN(0, 2);
2734 IEM_MC_LOCAL(uint64_t, uSrc);
2735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2736
2737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2739 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2740 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2741
2742 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2743 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2744
2745 IEM_MC_ADVANCE_RIP();
2746 IEM_MC_END();
2747 return VINF_SUCCESS;
2748 }
2749
2750 /**
2751 * @opdone
2752 * @opmnemonic ud0f13m3
2753 * @opcode 0x13
2754 * @opcodesub 11 mr/reg
2755 * @oppfx none
2756 * @opunused immediate
2757 * @opcpuid sse
2758 * @optest ->
2759 */
2760 return IEMOP_RAISE_INVALID_OPCODE();
2761}
2762
2763
2764/**
2765 * @opcode 0x13
2766 * @opcodesub !11 mr/reg
2767 * @oppfx 0x66
2768 * @opcpuid sse2
2769 * @opgroup og_sse2_pcksclr_datamove
2770 * @opxcpttype 5
2771 * @optest op1=1 op2=2 -> op1=2
2772 * @optest op1=0 op2=-42 -> op1=-42
2773 */
2774FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2775{
2776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2777 if (IEM_IS_MODRM_MEM_MODE(bRm))
2778 {
2779 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2780 IEM_MC_BEGIN(0, 2);
2781 IEM_MC_LOCAL(uint64_t, uSrc);
2782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2783
2784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2786 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2787 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2788
2789 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2790 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2791
2792 IEM_MC_ADVANCE_RIP();
2793 IEM_MC_END();
2794 return VINF_SUCCESS;
2795 }
2796
2797 /**
2798 * @opdone
2799 * @opmnemonic ud660f13m3
2800 * @opcode 0x13
2801 * @opcodesub 11 mr/reg
2802 * @oppfx 0x66
2803 * @opunused immediate
2804 * @opcpuid sse
2805 * @optest ->
2806 */
2807 return IEMOP_RAISE_INVALID_OPCODE();
2808}
2809
2810
2811/**
2812 * @opmnemonic udf30f13
2813 * @opcode 0x13
2814 * @oppfx 0xf3
2815 * @opunused intel-modrm
2816 * @opcpuid sse
2817 * @optest ->
2818 * @opdone
2819 */
2820
2821/**
2822 * @opmnemonic udf20f13
2823 * @opcode 0x13
2824 * @oppfx 0xf2
2825 * @opunused intel-modrm
2826 * @opcpuid sse
2827 * @optest ->
2828 * @opdone
2829 */
2830
2831/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2832FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2833{
2834 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2835 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2836}
2837
2838
2839/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2840FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2841{
2842 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2843 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2844}
2845
2846
2847/**
2848 * @opdone
2849 * @opmnemonic udf30f14
2850 * @opcode 0x14
2851 * @oppfx 0xf3
2852 * @opunused intel-modrm
2853 * @opcpuid sse
2854 * @optest ->
2855 * @opdone
2856 */
2857
2858/**
2859 * @opmnemonic udf20f14
2860 * @opcode 0x14
2861 * @oppfx 0xf2
2862 * @opunused intel-modrm
2863 * @opcpuid sse
2864 * @optest ->
2865 * @opdone
2866 */
2867
2868/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2869FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2870{
2871 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2872 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2873}
2874
2875
2876/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2877FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2878{
2879 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2880 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2881}
2882
2883
2884/* Opcode 0xf3 0x0f 0x15 - invalid */
2885/* Opcode 0xf2 0x0f 0x15 - invalid */
2886
2887/**
2888 * @opdone
2889 * @opmnemonic udf30f15
2890 * @opcode 0x15
2891 * @oppfx 0xf3
2892 * @opunused intel-modrm
2893 * @opcpuid sse
2894 * @optest ->
2895 * @opdone
2896 */
2897
2898/**
2899 * @opmnemonic udf20f15
2900 * @opcode 0x15
2901 * @oppfx 0xf2
2902 * @opunused intel-modrm
2903 * @opcpuid sse
2904 * @optest ->
2905 * @opdone
2906 */
2907
2908FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2909{
2910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2911 if (IEM_IS_MODRM_REG_MODE(bRm))
2912 {
2913 /**
2914 * @opcode 0x16
2915 * @opcodesub 11 mr/reg
2916 * @oppfx none
2917 * @opcpuid sse
2918 * @opgroup og_sse_simdfp_datamove
2919 * @opxcpttype 5
2920 * @optest op1=1 op2=2 -> op1=2
2921 * @optest op1=0 op2=-42 -> op1=-42
2922 */
2923 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2924
2925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2926 IEM_MC_BEGIN(0, 1);
2927 IEM_MC_LOCAL(uint64_t, uSrc);
2928
2929 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2930 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2931 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2932 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2933
2934 IEM_MC_ADVANCE_RIP();
2935 IEM_MC_END();
2936 }
2937 else
2938 {
2939 /**
2940 * @opdone
2941 * @opcode 0x16
2942 * @opcodesub !11 mr/reg
2943 * @oppfx none
2944 * @opcpuid sse
2945 * @opgroup og_sse_simdfp_datamove
2946 * @opxcpttype 5
2947 * @optest op1=1 op2=2 -> op1=2
2948 * @optest op1=0 op2=-42 -> op1=-42
2949 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2950 */
2951 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2952
2953 IEM_MC_BEGIN(0, 2);
2954 IEM_MC_LOCAL(uint64_t, uSrc);
2955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2956
2957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2959 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2960 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2961
2962 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2963 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2964
2965 IEM_MC_ADVANCE_RIP();
2966 IEM_MC_END();
2967 }
2968 return VINF_SUCCESS;
2969}
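
/* Note: This is the mirror image of the 0x12 pair: the mod=3 form is MOVLHPS
   (xmm1[127:64] = xmm2[63:0]) and the memory form is MOVHPS
   (xmm1[127:64] = mem64).  Both preserve the low qword, hence the
   IEM_MC_STORE_XREG_HI_U64 with no zero extension. */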
2970
2971
2972/**
2973 * @opcode 0x16
2974 * @opcodesub !11 mr/reg
2975 * @oppfx 0x66
2976 * @opcpuid sse2
2977 * @opgroup og_sse2_pcksclr_datamove
2978 * @opxcpttype 5
2979 * @optest op1=1 op2=2 -> op1=2
2980 * @optest op1=0 op2=-42 -> op1=-42
2981 */
2982FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2983{
2984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2985 if (IEM_IS_MODRM_MEM_MODE(bRm))
2986 {
2987 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2988 IEM_MC_BEGIN(0, 2);
2989 IEM_MC_LOCAL(uint64_t, uSrc);
2990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2991
2992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2995 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2996
2997 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2998 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2999
3000 IEM_MC_ADVANCE_RIP();
3001 IEM_MC_END();
3002 return VINF_SUCCESS;
3003 }
3004
3005 /**
3006 * @opdone
3007 * @opmnemonic ud660f16m3
3008 * @opcode 0x16
3009 * @opcodesub 11 mr/reg
3010 * @oppfx 0x66
3011 * @opunused immediate
3012 * @opcpuid sse
3013 * @optest ->
3014 */
3015 return IEMOP_RAISE_INVALID_OPCODE();
3016}
3017
3018
3019/**
3020 * @opcode 0x16
3021 * @oppfx 0xf3
3022 * @opcpuid sse3
3023 * @opgroup og_sse3_pcksclr_datamove
3024 * @opxcpttype 4
3025 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3026 * op1=0x00000002000000020000000100000001
3027 */
3028FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3029{
3030 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3032 if (IEM_IS_MODRM_REG_MODE(bRm))
3033 {
3034 /*
3035 * Register, register.
3036 */
3037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3038 IEM_MC_BEGIN(2, 0);
3039 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3040 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3041
3042 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3043 IEM_MC_PREPARE_SSE_USAGE();
3044
3045 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3046 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3047 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3048
3049 IEM_MC_ADVANCE_RIP();
3050 IEM_MC_END();
3051 }
3052 else
3053 {
3054 /*
3055 * Register, memory.
3056 */
3057 IEM_MC_BEGIN(2, 2);
3058 IEM_MC_LOCAL(RTUINT128U, uSrc);
3059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3060 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3061 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3062
3063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3065 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3066 IEM_MC_PREPARE_SSE_USAGE();
3067
3068 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3069 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3070 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3071
3072 IEM_MC_ADVANCE_RIP();
3073 IEM_MC_END();
3074 }
3075 return VINF_SUCCESS;
3076}
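
/* Note: MOVSHDUP is the odd-dword counterpart of MOVSLDUP above:
       dst[31:0]  = dst[63:32]  = src[63:32]
       dst[95:64] = dst[127:96] = src[127:96]
*/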
3077
3078/**
3079 * @opdone
3080 * @opmnemonic udf20f16
3081 * @opcode 0x16
3082 * @oppfx 0xf2
3083 * @opunused intel-modrm
3084 * @opcpuid sse
3085 * @optest ->
3086 * @opdone
3087 */
3088
3089
3090/**
3091 * @opcode 0x17
3092 * @opcodesub !11 mr/reg
3093 * @oppfx none
3094 * @opcpuid sse
3095 * @opgroup og_sse_simdfp_datamove
3096 * @opxcpttype 5
3097 * @optest op1=1 op2=2 -> op1=2
3098 * @optest op1=0 op2=-42 -> op1=-42
3099 */
3100FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3101{
3102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3103 if (IEM_IS_MODRM_MEM_MODE(bRm))
3104 {
3105 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3106
3107 IEM_MC_BEGIN(0, 2);
3108 IEM_MC_LOCAL(uint64_t, uSrc);
3109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3110
3111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3113 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3114 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3115
3116 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3117 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3118
3119 IEM_MC_ADVANCE_RIP();
3120 IEM_MC_END();
3121 return VINF_SUCCESS;
3122 }
3123
3124 /**
3125 * @opdone
3126 * @opmnemonic ud0f17m3
3127 * @opcode 0x17
3128 * @opcodesub 11 mr/reg
3129 * @oppfx none
3130 * @opunused immediate
3131 * @opcpuid sse
3132 * @optest ->
3133 */
3134 return IEMOP_RAISE_INVALID_OPCODE();
3135}
3136
3137
3138/**
3139 * @opcode 0x17
3140 * @opcodesub !11 mr/reg
3141 * @oppfx 0x66
3142 * @opcpuid sse2
3143 * @opgroup og_sse2_pcksclr_datamove
3144 * @opxcpttype 5
3145 * @optest op1=1 op2=2 -> op1=2
3146 * @optest op1=0 op2=-42 -> op1=-42
3147 */
3148FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3149{
3150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3151 if (IEM_IS_MODRM_MEM_MODE(bRm))
3152 {
3153 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3154
3155 IEM_MC_BEGIN(0, 2);
3156 IEM_MC_LOCAL(uint64_t, uSrc);
3157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3158
3159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3161 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3162 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3163
3164 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3165 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3166
3167 IEM_MC_ADVANCE_RIP();
3168 IEM_MC_END();
3169 return VINF_SUCCESS;
3170 }
3171
3172 /**
3173 * @opdone
3174 * @opmnemonic ud660f17m3
3175 * @opcode 0x17
3176 * @opcodesub 11 mr/reg
3177 * @oppfx 0x66
3178 * @opunused immediate
3179 * @opcpuid sse
3180 * @optest ->
3181 */
3182 return IEMOP_RAISE_INVALID_OPCODE();
3183}
3184
3185
3186/**
3187 * @opdone
3188 * @opmnemonic udf30f17
3189 * @opcode 0x17
3190 * @oppfx 0xf3
3191 * @opunused intel-modrm
3192 * @opcpuid sse
3193 * @optest ->
3194 * @opdone
3195 */
3196
3197/**
3198 * @opmnemonic udf20f17
3199 * @opcode 0x17
3200 * @oppfx 0xf2
3201 * @opunused intel-modrm
3202 * @opcpuid sse
3203 * @optest ->
3204 * @opdone
3205 */
3206
3207
3208/** Opcode 0x0f 0x18. */
3209FNIEMOP_DEF(iemOp_prefetch_Grp16)
3210{
3211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3212 if (IEM_IS_MODRM_MEM_MODE(bRm))
3213 {
3214 switch (IEM_GET_MODRM_REG_8(bRm))
3215 {
3216 case 4: /* Aliased to /0 for the time being according to AMD. */
3217 case 5: /* Aliased to /0 for the time being according to AMD. */
3218 case 6: /* Aliased to /0 for the time being according to AMD. */
3219 case 7: /* Aliased to /0 for the time being according to AMD. */
3220 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3221 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3222 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3223 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3225 }
3226
3227 IEM_MC_BEGIN(0, 1);
3228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3231 /* Currently a NOP. */
3232 NOREF(GCPtrEffSrc);
3233 IEM_MC_ADVANCE_RIP();
3234 IEM_MC_END();
3235 return VINF_SUCCESS;
3236 }
3237
3238 return IEMOP_RAISE_INVALID_OPCODE();
3239}
3240
3241
3242/** Opcode 0x0f 0x19..0x1f. */
3243FNIEMOP_DEF(iemOp_nop_Ev)
3244{
3245 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3247 if (IEM_IS_MODRM_REG_MODE(bRm))
3248 {
3249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3250 IEM_MC_BEGIN(0, 0);
3251 IEM_MC_ADVANCE_RIP();
3252 IEM_MC_END();
3253 }
3254 else
3255 {
3256 IEM_MC_BEGIN(0, 1);
3257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3260 /* Currently a NOP. */
3261 NOREF(GCPtrEffSrc);
3262 IEM_MC_ADVANCE_RIP();
3263 IEM_MC_END();
3264 }
3265 return VINF_SUCCESS;
3266}
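
/* Note: 0x0f 0x1f is the long NOP both Intel and AMD recommend for multi-byte
   padding, e.g. the canonical 6-byte form 66 0f 1f 44 00 00
   (nop word [rax+rax+0]); the remaining 0x19..0x1e slots are reserved
   hint-NOPs that decode a ModRM byte the same way. */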
3267
3268
3269/** Opcode 0x0f 0x20. */
3270FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3271{
3272 /* mod is ignored, as are operand size overrides. */
3273 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3274 IEMOP_HLP_MIN_386();
3275 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3276 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3277 else
3278 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3279
3280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3281 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3282 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3283 {
3284 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3285 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3286 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3287 iCrReg |= 8;
3288 }
3289 switch (iCrReg)
3290 {
3291 case 0: case 2: case 3: case 4: case 8:
3292 break;
3293 default:
3294 return IEMOP_RAISE_INVALID_OPCODE();
3295 }
3296 IEMOP_HLP_DONE_DECODING();
3297
3298 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3299}
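
/* Note: On AMD CPUs with the AltMovCr8 CPUID feature the LOCK prefix selects
   CR8 from 32-bit code, so f0 0f 20 c0 reads CR8 into eax rather than
   faulting; the fMovCr8In32Bit check above models that, raising #UD when the
   feature is absent. */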
3300
3301
3302/** Opcode 0x0f 0x21. */
3303FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3304{
3305 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3306 IEMOP_HLP_MIN_386();
3307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3310 return IEMOP_RAISE_INVALID_OPCODE();
3311 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3312 IEM_GET_MODRM_RM(pVCpu, bRm),
3313 IEM_GET_MODRM_REG_8(bRm));
3314}
3315
3316
3317/** Opcode 0x0f 0x22. */
3318FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3319{
3320 /* mod is ignored, as are operand size overrides. */
3321 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3322 IEMOP_HLP_MIN_386();
3323 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3324 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3325 else
3326 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3327
3328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3329 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3330 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3331 {
3332 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3333 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3334 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3335 iCrReg |= 8;
3336 }
3337 switch (iCrReg)
3338 {
3339 case 0: case 2: case 3: case 4: case 8:
3340 break;
3341 default:
3342 return IEMOP_RAISE_INVALID_OPCODE();
3343 }
3344 IEMOP_HLP_DONE_DECODING();
3345
3346 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3347}
3348
3349
3350/** Opcode 0x0f 0x23. */
3351FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3352{
3353 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3354 IEMOP_HLP_MIN_386();
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3358 return IEMOP_RAISE_INVALID_OPCODE();
3359 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3360 IEM_GET_MODRM_REG_8(bRm),
3361 IEM_GET_MODRM_RM(pVCpu, bRm));
3362}
3363
3364
3365/** Opcode 0x0f 0x24. */
3366FNIEMOP_DEF(iemOp_mov_Rd_Td)
3367{
3368 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3369 IEMOP_HLP_MIN_386();
3370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3372 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3373 return IEMOP_RAISE_INVALID_OPCODE();
3374 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3375 IEM_GET_MODRM_RM(pVCpu, bRm),
3376 IEM_GET_MODRM_REG_8(bRm));
3377}
3378
3379
3380/** Opcode 0x0f 0x26. */
3381FNIEMOP_DEF(iemOp_mov_Td_Rd)
3382{
3383 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3384 IEMOP_HLP_MIN_386();
3385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3387 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3388 return IEMOP_RAISE_INVALID_OPCODE();
3389 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3390 IEM_GET_MODRM_REG_8(bRm),
3391 IEM_GET_MODRM_RM(pVCpu, bRm));
3392}
3393
3394
3395/**
3396 * @opcode 0x28
3397 * @oppfx none
3398 * @opcpuid sse
3399 * @opgroup og_sse_simdfp_datamove
3400 * @opxcpttype 1
3401 * @optest op1=1 op2=2 -> op1=2
3402 * @optest op1=0 op2=-42 -> op1=-42
3403 */
3404FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3405{
3406 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3408 if (IEM_IS_MODRM_REG_MODE(bRm))
3409 {
3410 /*
3411 * Register, register.
3412 */
3413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3414 IEM_MC_BEGIN(0, 0);
3415 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3417 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3418 IEM_GET_MODRM_RM(pVCpu, bRm));
3419 IEM_MC_ADVANCE_RIP();
3420 IEM_MC_END();
3421 }
3422 else
3423 {
3424 /*
3425 * Register, memory.
3426 */
3427 IEM_MC_BEGIN(0, 2);
3428 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3430
3431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3433 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435
3436 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3437 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3438
3439 IEM_MC_ADVANCE_RIP();
3440 IEM_MC_END();
3441 }
3442 return VINF_SUCCESS;
3443}
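
/* Note: Unlike MOVUPS (0x10/0x11) above, MOVAPS uses the _ALIGN_SSE memory
   accessors, so a memory operand that is not 16-byte aligned raises #GP(0);
   that alignment check is the architecturally visible difference between the
   two encodings. */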
3444
3445/**
3446 * @opcode 0x28
3447 * @oppfx 66
3448 * @opcpuid sse2
3449 * @opgroup og_sse2_pcksclr_datamove
3450 * @opxcpttype 1
3451 * @optest op1=1 op2=2 -> op1=2
3452 * @optest op1=0 op2=-42 -> op1=-42
3453 */
3454FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3455{
3456 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3458 if (IEM_IS_MODRM_REG_MODE(bRm))
3459 {
3460 /*
3461 * Register, register.
3462 */
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3464 IEM_MC_BEGIN(0, 0);
3465 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3466 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3467 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3468 IEM_GET_MODRM_RM(pVCpu, bRm));
3469 IEM_MC_ADVANCE_RIP();
3470 IEM_MC_END();
3471 }
3472 else
3473 {
3474 /*
3475 * Register, memory.
3476 */
3477 IEM_MC_BEGIN(0, 2);
3478 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3480
3481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3483 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3485
3486 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3487 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3488
3489 IEM_MC_ADVANCE_RIP();
3490 IEM_MC_END();
3491 }
3492 return VINF_SUCCESS;
3493}
3494
3495/* Opcode 0xf3 0x0f 0x28 - invalid */
3496/* Opcode 0xf2 0x0f 0x28 - invalid */
3497
3498/**
3499 * @opcode 0x29
3500 * @oppfx none
3501 * @opcpuid sse
3502 * @opgroup og_sse_simdfp_datamove
3503 * @opxcpttype 1
3504 * @optest op1=1 op2=2 -> op1=2
3505 * @optest op1=0 op2=-42 -> op1=-42
3506 */
3507FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3508{
3509 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3511 if (IEM_IS_MODRM_REG_MODE(bRm))
3512 {
3513 /*
3514 * Register, register.
3515 */
3516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3517 IEM_MC_BEGIN(0, 0);
3518 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3519 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3520 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3521 IEM_GET_MODRM_REG(pVCpu, bRm));
3522 IEM_MC_ADVANCE_RIP();
3523 IEM_MC_END();
3524 }
3525 else
3526 {
3527 /*
3528 * Memory, register.
3529 */
3530 IEM_MC_BEGIN(0, 2);
3531 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3533
3534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3538
3539 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3540 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3541
3542 IEM_MC_ADVANCE_RIP();
3543 IEM_MC_END();
3544 }
3545 return VINF_SUCCESS;
3546}
3547
3548/**
3549 * @opcode 0x29
3550 * @oppfx 66
3551 * @opcpuid sse2
3552 * @opgroup og_sse2_pcksclr_datamove
3553 * @opxcpttype 1
3554 * @optest op1=1 op2=2 -> op1=2
3555 * @optest op1=0 op2=-42 -> op1=-42
3556 */
3557FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3558{
3559 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3561 if (IEM_IS_MODRM_REG_MODE(bRm))
3562 {
3563 /*
3564 * Register, register.
3565 */
3566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3567 IEM_MC_BEGIN(0, 0);
3568 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3569 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3570 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3571 IEM_GET_MODRM_REG(pVCpu, bRm));
3572 IEM_MC_ADVANCE_RIP();
3573 IEM_MC_END();
3574 }
3575 else
3576 {
3577 /*
3578 * Memory, register.
3579 */
3580 IEM_MC_BEGIN(0, 2);
3581 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3583
3584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3586 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3587 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3588
3589 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3590 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3591
3592 IEM_MC_ADVANCE_RIP();
3593 IEM_MC_END();
3594 }
3595 return VINF_SUCCESS;
3596}
3597
3598/* Opcode 0xf3 0x0f 0x29 - invalid */
3599/* Opcode 0xf2 0x0f 0x29 - invalid */
3600
3601
3602/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3603FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
3604/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3605FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
3606
3607
3608/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3609FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3610{
3611 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3612
3613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3614 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3615 {
3616 if (IEM_IS_MODRM_REG_MODE(bRm))
3617 {
3618 /* XMM, greg64 */
3619 IEM_MC_BEGIN(3, 4);
3620 IEM_MC_LOCAL(uint32_t, fMxcsr);
3621 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3622 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3623 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3624 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3625
3626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3627 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3628 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3629
3630 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3631 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3632 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3633 IEM_MC_IF_MXCSR_XCPT_PENDING()
3634 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3635 IEM_MC_ELSE()
3636 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3637 IEM_MC_ENDIF();
3638
3639 IEM_MC_ADVANCE_RIP();
3640 IEM_MC_END();
3641 }
3642 else
3643 {
3644 /* XMM, [mem64] */
3645 IEM_MC_BEGIN(3, 4);
3646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3647 IEM_MC_LOCAL(uint32_t, fMxcsr);
3648 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3649 IEM_MC_LOCAL(int64_t, i64Src);
3650 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3651 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3652 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3653
3654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3656 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3657 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3658
3659 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3660 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3661 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3662 IEM_MC_IF_MXCSR_XCPT_PENDING()
3663 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3664 IEM_MC_ELSE()
3665 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3666 IEM_MC_ENDIF();
3667
3668 IEM_MC_ADVANCE_RIP();
3669 IEM_MC_END();
3670 }
3671 }
3672 else
3673 {
3674 if (IEM_IS_MODRM_REG_MODE(bRm))
3675 {
3676 /* XMM, greg32 */
3677 IEM_MC_BEGIN(3, 4);
3678 IEM_MC_LOCAL(uint32_t, fMxcsr);
3679 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3680 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3681 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3682 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3683
3684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3685 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3686 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3687
3688 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3689 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3690 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3691 IEM_MC_IF_MXCSR_XCPT_PENDING()
3692 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3693 IEM_MC_ELSE()
3694 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3695 IEM_MC_ENDIF();
3696
3697 IEM_MC_ADVANCE_RIP();
3698 IEM_MC_END();
3699 }
3700 else
3701 {
3702 /* XMM, [mem32] */
3703 IEM_MC_BEGIN(3, 4);
3704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3705 IEM_MC_LOCAL(uint32_t, fMxcsr);
3706 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3707 IEM_MC_LOCAL(int32_t, i32Src);
3708 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3709 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3710 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3711
3712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3714 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3715 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3716
3717 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3718 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3719 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3720 IEM_MC_IF_MXCSR_XCPT_PENDING()
3721 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3722 IEM_MC_ELSE()
3723 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3724 IEM_MC_ENDIF();
3725
3726 IEM_MC_ADVANCE_RIP();
3727 IEM_MC_END();
3728 }
3729 }
3730 return VINF_SUCCESS;
3731}
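
/* Note: The integer to float32 conversion is inexact for most magnitudes above
   2^24 since float32 only has a 24-bit significand; e.g. converting 268435457
   (2^28+1) yields 268435456.0 under round-to-nearest and sets MXCSR.PE.  That
   is why the worker returns the updated MXCSR and the MC blocks above test
   IEM_MC_IF_MXCSR_XCPT_PENDING for an unmasked SIMD FP exception. */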
3732
3733
3734/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3735FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3736{
3737 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3738
3739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3740 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3741 {
3742 if (IEM_IS_MODRM_REG_MODE(bRm))
3743 {
3744 /* XMM, greg64 */
3745 IEM_MC_BEGIN(3, 4);
3746 IEM_MC_LOCAL(uint32_t, fMxcsr);
3747 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3748 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3749 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3750 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3751
3752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3753 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3754 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3755
3756 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3757 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3758 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3759 IEM_MC_IF_MXCSR_XCPT_PENDING()
3760 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3761 IEM_MC_ELSE()
3762 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3763 IEM_MC_ENDIF();
3764
3765 IEM_MC_ADVANCE_RIP();
3766 IEM_MC_END();
3767 }
3768 else
3769 {
3770 /* XMM, [mem64] */
3771 IEM_MC_BEGIN(3, 4);
3772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3773 IEM_MC_LOCAL(uint32_t, fMxcsr);
3774 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3775 IEM_MC_LOCAL(int64_t, i64Src);
3776 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3777 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3778 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3779
3780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3782 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3783 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3784
3785 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3786 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3787 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3788 IEM_MC_IF_MXCSR_XCPT_PENDING()
3789 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3790 IEM_MC_ELSE()
3791 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3792 IEM_MC_ENDIF();
3793
3794 IEM_MC_ADVANCE_RIP();
3795 IEM_MC_END();
3796 }
3797 }
3798 else
3799 {
3800 if (IEM_IS_MODRM_REG_MODE(bRm))
3801 {
3802 /* XMM, greg32 */
3803 IEM_MC_BEGIN(3, 4);
3804 IEM_MC_LOCAL(uint32_t, fMxcsr);
3805 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3806 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3807 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3808 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3809
3810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3811 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3812 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3813
3814 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3815 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3816 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3817 IEM_MC_IF_MXCSR_XCPT_PENDING()
3818 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3819 IEM_MC_ELSE()
3820 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3821 IEM_MC_ENDIF();
3822
3823 IEM_MC_ADVANCE_RIP();
3824 IEM_MC_END();
3825 }
3826 else
3827 {
3828 /* XMM, [mem32] */
3829 IEM_MC_BEGIN(3, 4);
3830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3831 IEM_MC_LOCAL(uint32_t, fMxcsr);
3832 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3833 IEM_MC_LOCAL(int32_t, i32Src);
3834 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3835 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3836 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3837
3838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3841 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3842
3843 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3844 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3845 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3846 IEM_MC_IF_MXCSR_XCPT_PENDING()
3847 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3848 IEM_MC_ELSE()
3849 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3850 IEM_MC_ENDIF();
3851
3852 IEM_MC_ADVANCE_RIP();
3853 IEM_MC_END();
3854 }
3855 }
3856 return VINF_SUCCESS;
3857}
3858
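/*
 * Note: The conversion workers above do not touch guest state directly;
 *       they return the updated MXCSR via pfMxcsr.  IEM_MC_SSE_UPDATE_MXCSR()
 *       merges the new exception flags, and the result is only stored when
 *       no unmasked SIMD FP exception is pending; otherwise
 *       IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT() raises the SIMD FP
 *       exception (or #UD when the OS hasn't enabled it, as the name
 *       suggests) and the destination is left unmodified.
 */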
3859
3860/**
3861 * @opcode 0x2b
3862 * @opcodesub !11 mr/reg
3863 * @oppfx none
3864 * @opcpuid sse
3865 * @opgroup og_sse1_cachect
3866 * @opxcpttype 1
3867 * @optest op1=1 op2=2 -> op1=2
3868 * @optest op1=0 op2=-42 -> op1=-42
3869 */
3870FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3871{
3872 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3874 if (IEM_IS_MODRM_MEM_MODE(bRm))
3875 {
3876 /*
3877 * memory, register.
3878 */
3879 IEM_MC_BEGIN(0, 2);
3880 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3882
3883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3885 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3886 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3887
3888 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3889 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3890
3891 IEM_MC_ADVANCE_RIP();
3892 IEM_MC_END();
3893 }
3894 /* The register, register encoding is invalid. */
3895 else
3896 return IEMOP_RAISE_INVALID_OPCODE();
3897 return VINF_SUCCESS;
3898}
3899
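/*
 * Note: movntps/movntpd are non-temporal stores.  The caching hint is
 *       irrelevant under emulation (it is only a hint), so IEM performs an
 *       ordinary 16-byte store; the _ALIGN_SSE variant still enforces the
 *       16-byte alignment requirement of these instructions, and mod=11
 *       (register form) is undefined, hence the #UD.
 */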
3900/**
3901 * @opcode 0x2b
3902 * @opcodesub !11 mr/reg
3903 * @oppfx 0x66
3904 * @opcpuid sse2
3905 * @opgroup og_sse2_cachect
3906 * @opxcpttype 1
3907 * @optest op1=1 op2=2 -> op1=2
3908 * @optest op1=0 op2=-42 -> op1=-42
3909 */
3910FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3911{
3912 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3913 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3914 if (IEM_IS_MODRM_MEM_MODE(bRm))
3915 {
3916 /*
3917 * memory, register.
3918 */
3919 IEM_MC_BEGIN(0, 2);
3920 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3922
3923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3925 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3926 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3927
3928 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3929 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3930
3931 IEM_MC_ADVANCE_RIP();
3932 IEM_MC_END();
3933 }
3934 /* The register, register encoding is invalid. */
3935 else
3936 return IEMOP_RAISE_INVALID_OPCODE();
3937 return VINF_SUCCESS;
3938}
3939/* Opcode 0xf3 0x0f 0x2b - invalid */
3940/* Opcode 0xf2 0x0f 0x2b - invalid */
3941
3942
3943/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3944FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
3945/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3946FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
3947
3948
3949/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3950FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
3951{
3952 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3953
3954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3955 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3956 {
3957 if (IEM_IS_MODRM_REG_MODE(bRm))
3958 {
3959 /* greg64, XMM */
3960 IEM_MC_BEGIN(3, 4);
3961 IEM_MC_LOCAL(uint32_t, fMxcsr);
3962 IEM_MC_LOCAL(int64_t, i64Dst);
3963 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3964 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
3965 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
3966
3967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3968 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3969 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3970
3971 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3972 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
3973 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3974 IEM_MC_IF_MXCSR_XCPT_PENDING()
3975 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3976 IEM_MC_ELSE()
3977 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
3978 IEM_MC_ENDIF();
3979
3980 IEM_MC_ADVANCE_RIP();
3981 IEM_MC_END();
3982 }
3983 else
3984 {
3985 /* greg64, [mem32] */
3986 IEM_MC_BEGIN(3, 4);
3987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3988 IEM_MC_LOCAL(uint32_t, fMxcsr);
3989 IEM_MC_LOCAL(int64_t, i64Dst);
3990 IEM_MC_LOCAL(uint32_t, u32Src);
3991 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3992 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
3993 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
3994
3995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3997 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3998 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3999
4000 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4001 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4002 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4003 IEM_MC_IF_MXCSR_XCPT_PENDING()
4004 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4005 IEM_MC_ELSE()
4006 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4007 IEM_MC_ENDIF();
4008
4009 IEM_MC_ADVANCE_RIP();
4010 IEM_MC_END();
4011 }
4012 }
4013 else
4014 {
4015 if (IEM_IS_MODRM_REG_MODE(bRm))
4016 {
4017 /* greg, XMM */
4018 IEM_MC_BEGIN(3, 4);
4019 IEM_MC_LOCAL(uint32_t, fMxcsr);
4020 IEM_MC_LOCAL(int32_t, i32Dst);
4021 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4022 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4023 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4024
4025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4026 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4027 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4028
4029 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4030 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4031 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4032 IEM_MC_IF_MXCSR_XCPT_PENDING()
4033 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4034 IEM_MC_ELSE()
4035 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4036 IEM_MC_ENDIF();
4037
4038 IEM_MC_ADVANCE_RIP();
4039 IEM_MC_END();
4040 }
4041 else
4042 {
4043 /* greg, [mem] */
4044 IEM_MC_BEGIN(3, 4);
4045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4046 IEM_MC_LOCAL(uint32_t, fMxcsr);
4047 IEM_MC_LOCAL(int32_t, i32Dst);
4048 IEM_MC_LOCAL(uint32_t, u32Src);
4049 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4050 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4051 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4052
4053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4055 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4056 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4057
4058 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4059 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4060 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4061 IEM_MC_IF_MXCSR_XCPT_PENDING()
4062 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4063 IEM_MC_ELSE()
4064 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4065 IEM_MC_ENDIF();
4066
4067 IEM_MC_ADVANCE_RIP();
4068 IEM_MC_END();
4069 }
4070 }
4071 return VINF_SUCCESS;
4072}
4073
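/*
 * Note: The cvtt* forms (opcode 0x2c) always truncate, i.e. round toward
 *       zero regardless of MXCSR.RC, while the cvt* forms (opcode 0x2d)
 *       honour the MXCSR rounding control.  On overflow or invalid input
 *       both return the integer indefinite value (0x80000000 resp.
 *       0x8000000000000000) and signal #I.
 */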
4074
4075/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4076FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4077{
4078 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4079
4080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4081 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4082 {
4083 if (IEM_IS_MODRM_REG_MODE(bRm))
4084 {
4085 /* greg64, XMM */
4086 IEM_MC_BEGIN(3, 4);
4087 IEM_MC_LOCAL(uint32_t, fMxcsr);
4088 IEM_MC_LOCAL(int64_t, i64Dst);
4089 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4090 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4091 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4092
4093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4094 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4095 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4096
4097 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4098 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4099 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4100 IEM_MC_IF_MXCSR_XCPT_PENDING()
4101 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4102 IEM_MC_ELSE()
4103 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4104 IEM_MC_ENDIF();
4105
4106 IEM_MC_ADVANCE_RIP();
4107 IEM_MC_END();
4108 }
4109 else
4110 {
4111 /* greg64, [mem64] */
4112 IEM_MC_BEGIN(3, 4);
4113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4114 IEM_MC_LOCAL(uint32_t, fMxcsr);
4115 IEM_MC_LOCAL(int64_t, i64Dst);
4116 IEM_MC_LOCAL(uint64_t, u64Src);
4117 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4118 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4119 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4120
4121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4123 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4124 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4125
4126 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4127 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4128 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4129 IEM_MC_IF_MXCSR_XCPT_PENDING()
4130 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4131 IEM_MC_ELSE()
4132 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4133 IEM_MC_ENDIF();
4134
4135 IEM_MC_ADVANCE_RIP();
4136 IEM_MC_END();
4137 }
4138 }
4139 else
4140 {
4141 if (IEM_IS_MODRM_REG_MODE(bRm))
4142 {
4143 /* greg, XMM */
4144 IEM_MC_BEGIN(3, 4);
4145 IEM_MC_LOCAL(uint32_t, fMxcsr);
4146 IEM_MC_LOCAL(int32_t, i32Dst);
4147 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4148 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4149 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4150
4151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4152 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4153 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4154
4155 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4156 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4157 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4158 IEM_MC_IF_MXCSR_XCPT_PENDING()
4159 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4160 IEM_MC_ELSE()
4161 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4162 IEM_MC_ENDIF();
4163
4164 IEM_MC_ADVANCE_RIP();
4165 IEM_MC_END();
4166 }
4167 else
4168 {
4169 /* greg, [mem] */
4170 IEM_MC_BEGIN(3, 4);
4171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4172 IEM_MC_LOCAL(uint32_t, fMxcsr);
4173 IEM_MC_LOCAL(int32_t, i32Dst);
4174 IEM_MC_LOCAL(uint64_t, u64Src);
4175 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4176 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4177 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4178
4179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4181 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4182 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4183
4184 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4185 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4186 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4187 IEM_MC_IF_MXCSR_XCPT_PENDING()
4188 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4189 IEM_MC_ELSE()
4190 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4191 IEM_MC_ENDIF();
4192
4193 IEM_MC_ADVANCE_RIP();
4194 IEM_MC_END();
4195 }
4196 }
4197 return VINF_SUCCESS;
4198}
4199
4200
4201/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4202FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
4203/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4204FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
4205
4206
4207/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4208FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4209{
4210 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4211
4212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4213 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4214 {
4215 if (IEM_IS_MODRM_REG_MODE(bRm))
4216 {
4217 /* greg64, XMM */
4218 IEM_MC_BEGIN(3, 4);
4219 IEM_MC_LOCAL(uint32_t, fMxcsr);
4220 IEM_MC_LOCAL(int64_t, i64Dst);
4221 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4222 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4223 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4224
4225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4226 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4227 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4228
4229 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4230 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4231 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4232 IEM_MC_IF_MXCSR_XCPT_PENDING()
4233 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4234 IEM_MC_ELSE()
4235 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4236 IEM_MC_ENDIF();
4237
4238 IEM_MC_ADVANCE_RIP();
4239 IEM_MC_END();
4240 }
4241 else
4242 {
4243 /* greg64, [mem32] */
4244 IEM_MC_BEGIN(3, 4);
4245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4246 IEM_MC_LOCAL(uint32_t, fMxcsr);
4247 IEM_MC_LOCAL(int64_t, i64Dst);
4248 IEM_MC_LOCAL(uint32_t, u32Src);
4249 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4250 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4251 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4252
4253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4255 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4256 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4257
4258 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4259 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4260 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4261 IEM_MC_IF_MXCSR_XCPT_PENDING()
4262 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4263 IEM_MC_ELSE()
4264 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4265 IEM_MC_ENDIF();
4266
4267 IEM_MC_ADVANCE_RIP();
4268 IEM_MC_END();
4269 }
4270 }
4271 else
4272 {
4273 if (IEM_IS_MODRM_REG_MODE(bRm))
4274 {
4275 /* greg, XMM */
4276 IEM_MC_BEGIN(3, 4);
4277 IEM_MC_LOCAL(uint32_t, fMxcsr);
4278 IEM_MC_LOCAL(int32_t, i32Dst);
4279 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4280 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4281 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4282
4283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4284 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4285 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4286
4287 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4288 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4289 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4290 IEM_MC_IF_MXCSR_XCPT_PENDING()
4291 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4292 IEM_MC_ELSE()
4293 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4294 IEM_MC_ENDIF();
4295
4296 IEM_MC_ADVANCE_RIP();
4297 IEM_MC_END();
4298 }
4299 else
4300 {
4301 /* greg, [mem] */
4302 IEM_MC_BEGIN(3, 4);
4303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4304 IEM_MC_LOCAL(uint32_t, fMxcsr);
4305 IEM_MC_LOCAL(int32_t, i32Dst);
4306 IEM_MC_LOCAL(uint32_t, u32Src);
4307 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4308 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4309 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4310
4311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4314 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4315
4316 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4317 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4318 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4319 IEM_MC_IF_MXCSR_XCPT_PENDING()
4320 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4321 IEM_MC_ELSE()
4322 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4323 IEM_MC_ENDIF();
4324
4325 IEM_MC_ADVANCE_RIP();
4326 IEM_MC_END();
4327 }
4328 }
4329 return VINF_SUCCESS;
4330}
4331
4332
4333/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4334FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4335{
4336 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4337
4338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4339 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4340 {
4341 if (IEM_IS_MODRM_REG_MODE(bRm))
4342 {
4343 /* greg64, XMM */
4344 IEM_MC_BEGIN(3, 4);
4345 IEM_MC_LOCAL(uint32_t, fMxcsr);
4346 IEM_MC_LOCAL(int64_t, i64Dst);
4347 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4348 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4349 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4350
4351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4352 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4353 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4354
4355 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4356 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4357 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4358 IEM_MC_IF_MXCSR_XCPT_PENDING()
4359 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4360 IEM_MC_ELSE()
4361 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4362 IEM_MC_ENDIF();
4363
4364 IEM_MC_ADVANCE_RIP();
4365 IEM_MC_END();
4366 }
4367 else
4368 {
4369 /* greg64, [mem64] */
4370 IEM_MC_BEGIN(3, 4);
4371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4372 IEM_MC_LOCAL(uint32_t, fMxcsr);
4373 IEM_MC_LOCAL(int64_t, i64Dst);
4374 IEM_MC_LOCAL(uint64_t, u64Src);
4375 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4376 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4377 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4378
4379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4381 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4382 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4383
4384 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4385 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4386 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4387 IEM_MC_IF_MXCSR_XCPT_PENDING()
4388 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4389 IEM_MC_ELSE()
4390 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4391 IEM_MC_ENDIF();
4392
4393 IEM_MC_ADVANCE_RIP();
4394 IEM_MC_END();
4395 }
4396 }
4397 else
4398 {
4399 if (IEM_IS_MODRM_REG_MODE(bRm))
4400 {
4401 /* greg, XMM */
4402 IEM_MC_BEGIN(3, 4);
4403 IEM_MC_LOCAL(uint32_t, fMxcsr);
4404 IEM_MC_LOCAL(int32_t, i32Dst);
4405 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4406 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4407 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4408
4409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4410 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4411 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4412
4413 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4414 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4415 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4416 IEM_MC_IF_MXCSR_XCPT_PENDING()
4417 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4418 IEM_MC_ELSE()
4419 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4420 IEM_MC_ENDIF();
4421
4422 IEM_MC_ADVANCE_RIP();
4423 IEM_MC_END();
4424 }
4425 else
4426 {
4427 /* greg, [mem] */
4428 IEM_MC_BEGIN(3, 4);
4429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4430 IEM_MC_LOCAL(uint32_t, fMxcsr);
4431 IEM_MC_LOCAL(int32_t, i32Dst);
4432 IEM_MC_LOCAL(uint64_t, u64Src);
4433 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4434 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4435 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4436
4437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4439 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4440 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4441
4442 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4443 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4444 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4445 IEM_MC_IF_MXCSR_XCPT_PENDING()
4446 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4447 IEM_MC_ELSE()
4448 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4449 IEM_MC_ENDIF();
4450
4451 IEM_MC_ADVANCE_RIP();
4452 IEM_MC_END();
4453 }
4454 }
4455 return VINF_SUCCESS;
4456}
4457
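/*
 * Note: ucomis* and comis* compare the low scalar element and report the
 *       result in ZF, PF and CF, clearing OF, SF and AF.  The unordered
 *       (ucomis*) forms only signal #I on an SNaN operand, whereas comis*
 *       signals #I on any NaN; the decoding below is otherwise identical,
 *       and EFLAGS is only committed when no unmasked exception is pending.
 */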
4458
4459/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4460FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4461{
4462 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4464 if (IEM_IS_MODRM_REG_MODE(bRm))
4465 {
4466 /*
4467 * Register, register.
4468 */
4469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4470 IEM_MC_BEGIN(4, 1);
4471 IEM_MC_LOCAL(uint32_t, fEFlags);
4472 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4473 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4474 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4475 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4476 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4477 IEM_MC_PREPARE_SSE_USAGE();
4478 IEM_MC_FETCH_EFLAGS(fEFlags);
4479 IEM_MC_REF_MXCSR(pfMxcsr);
4480 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4481 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4482 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4483 IEM_MC_IF_MXCSR_XCPT_PENDING()
4484 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4485 IEM_MC_ELSE()
4486 IEM_MC_COMMIT_EFLAGS(fEFlags);
4487 IEM_MC_ENDIF();
4488
4489 IEM_MC_ADVANCE_RIP();
4490 IEM_MC_END();
4491 }
4492 else
4493 {
4494 /*
4495 * Register, memory.
4496 */
4497 IEM_MC_BEGIN(4, 3);
4498 IEM_MC_LOCAL(uint32_t, fEFlags);
4499 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4500 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4501 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4502 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4503 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4505
4506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4508 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4509 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4510
4511 IEM_MC_PREPARE_SSE_USAGE();
IEM_MC_FETCH_EFLAGS(fEFlags);
4512 IEM_MC_REF_MXCSR(pfMxcsr);
4513 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4514 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4515 IEM_MC_IF_MXCSR_XCPT_PENDING()
4516 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4517 IEM_MC_ELSE()
4518 IEM_MC_COMMIT_EFLAGS(fEFlags);
4519 IEM_MC_ENDIF();
4520
4521 IEM_MC_ADVANCE_RIP();
4522 IEM_MC_END();
4523 }
4524 return VINF_SUCCESS;
4525}
4526
4527
4528/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4529FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4530{
4531 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4533 if (IEM_IS_MODRM_REG_MODE(bRm))
4534 {
4535 /*
4536 * Register, register.
4537 */
4538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4539 IEM_MC_BEGIN(4, 1);
4540 IEM_MC_LOCAL(uint32_t, fEFlags);
4541 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4542 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4543 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4544 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4545 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4546 IEM_MC_PREPARE_SSE_USAGE();
4547 IEM_MC_FETCH_EFLAGS(fEFlags);
4548 IEM_MC_REF_MXCSR(pfMxcsr);
4549 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4550 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4551 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4552 IEM_MC_IF_MXCSR_XCPT_PENDING()
4553 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4554 IEM_MC_ELSE()
4555 IEM_MC_COMMIT_EFLAGS(fEFlags);
4556 IEM_MC_ENDIF();
4557
4558 IEM_MC_ADVANCE_RIP();
4559 IEM_MC_END();
4560 }
4561 else
4562 {
4563 /*
4564 * Register, memory.
4565 */
4566 IEM_MC_BEGIN(4, 3);
4567 IEM_MC_LOCAL(uint32_t, fEFlags);
4568 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4569 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4570 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4571 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4572 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4574
4575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4577 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4578 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4579
4580 IEM_MC_PREPARE_SSE_USAGE();
IEM_MC_FETCH_EFLAGS(fEFlags);
4581 IEM_MC_REF_MXCSR(pfMxcsr);
4582 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4583 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4584 IEM_MC_IF_MXCSR_XCPT_PENDING()
4585 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4586 IEM_MC_ELSE()
4587 IEM_MC_COMMIT_EFLAGS(fEFlags);
4588 IEM_MC_ENDIF();
4589
4590 IEM_MC_ADVANCE_RIP();
4591 IEM_MC_END();
4592 }
4593 return VINF_SUCCESS;
4594}
4595
4596
4597/* Opcode 0xf3 0x0f 0x2e - invalid */
4598/* Opcode 0xf2 0x0f 0x2e - invalid */
4599
4600
4601/** Opcode 0x0f 0x2f - comiss Vss, Wss */
4602FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4603{
4604 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4606 if (IEM_IS_MODRM_REG_MODE(bRm))
4607 {
4608 /*
4609 * Register, register.
4610 */
4611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4612 IEM_MC_BEGIN(4, 1);
4613 IEM_MC_LOCAL(uint32_t, fEFlags);
4614 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4615 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4616 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4617 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4618 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4619 IEM_MC_PREPARE_SSE_USAGE();
4620 IEM_MC_FETCH_EFLAGS(fEFlags);
4621 IEM_MC_REF_MXCSR(pfMxcsr);
4622 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4623 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4624 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4625 IEM_MC_IF_MXCSR_XCPT_PENDING()
4626 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4627 IEM_MC_ELSE()
4628 IEM_MC_COMMIT_EFLAGS(fEFlags);
4629 IEM_MC_ENDIF();
4630
4631 IEM_MC_ADVANCE_RIP();
4632 IEM_MC_END();
4633 }
4634 else
4635 {
4636 /*
4637 * Register, memory.
4638 */
4639 IEM_MC_BEGIN(4, 3);
4640 IEM_MC_LOCAL(uint32_t, fEFlags);
4641 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4642 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4643 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4644 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4645 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4647
4648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4651 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4652
4653 IEM_MC_PREPARE_SSE_USAGE();
IEM_MC_FETCH_EFLAGS(fEFlags);
4654 IEM_MC_REF_MXCSR(pfMxcsr);
4655 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4656 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4657 IEM_MC_IF_MXCSR_XCPT_PENDING()
4658 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4659 IEM_MC_ELSE()
4660 IEM_MC_COMMIT_EFLAGS(fEFlags);
4661 IEM_MC_ENDIF();
4662
4663 IEM_MC_ADVANCE_RIP();
4664 IEM_MC_END();
4665 }
4666 return VINF_SUCCESS;
4667}
4668
4669
4670/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
4671FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4672{
4673 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4675 if (IEM_IS_MODRM_REG_MODE(bRm))
4676 {
4677 /*
4678 * Register, register.
4679 */
4680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4681 IEM_MC_BEGIN(4, 1);
4682 IEM_MC_LOCAL(uint32_t, fEFlags);
4683 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4684 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4685 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4686 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4687 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4688 IEM_MC_PREPARE_SSE_USAGE();
4689 IEM_MC_FETCH_EFLAGS(fEFlags);
4690 IEM_MC_REF_MXCSR(pfMxcsr);
4691 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4692 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4693 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4694 IEM_MC_IF_MXCSR_XCPT_PENDING()
4695 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4696 IEM_MC_ELSE()
4697 IEM_MC_COMMIT_EFLAGS(fEFlags);
4698 IEM_MC_ENDIF();
4699
4700 IEM_MC_ADVANCE_RIP();
4701 IEM_MC_END();
4702 }
4703 else
4704 {
4705 /*
4706 * Register, memory.
4707 */
4708 IEM_MC_BEGIN(4, 3);
4709 IEM_MC_LOCAL(uint32_t, fEFlags);
4710 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4711 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4712 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4713 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4714 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4716
4717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4719 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4720 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4721
4722 IEM_MC_PREPARE_SSE_USAGE();
IEM_MC_FETCH_EFLAGS(fEFlags);
4723 IEM_MC_REF_MXCSR(pfMxcsr);
4724 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4725 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4726 IEM_MC_IF_MXCSR_XCPT_PENDING()
4727 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4728 IEM_MC_ELSE()
4729 IEM_MC_COMMIT_EFLAGS(fEFlags);
4730 IEM_MC_ENDIF();
4731
4732 IEM_MC_ADVANCE_RIP();
4733 IEM_MC_END();
4734 }
4735 return VINF_SUCCESS;
4736}
4737
4738
4739/* Opcode 0xf3 0x0f 0x2f - invalid */
4740/* Opcode 0xf2 0x0f 0x2f - invalid */
4741
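/*
 * Note: The system instructions that follow (wrmsr, rdtsc, rdmsr, rdpmc,
 *       sysenter, sysexit) are too stateful for the microcode macros and
 *       are therefore deferred to their C implementations via
 *       IEM_MC_DEFER_TO_CIMPL_*.
 */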
4742/** Opcode 0x0f 0x30. */
4743FNIEMOP_DEF(iemOp_wrmsr)
4744{
4745 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4747 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
4748}
4749
4750
4751/** Opcode 0x0f 0x31. */
4752FNIEMOP_DEF(iemOp_rdtsc)
4753{
4754 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4756 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
4757}
4758
4759
4760 /** Opcode 0x0f 0x32. */
4761FNIEMOP_DEF(iemOp_rdmsr)
4762{
4763 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4765 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
4766}
4767
4768
4769 /** Opcode 0x0f 0x33. */
4770FNIEMOP_DEF(iemOp_rdpmc)
4771{
4772 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4774 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
4775}
4776
4777
4778/** Opcode 0x0f 0x34. */
4779FNIEMOP_DEF(iemOp_sysenter)
4780{
4781 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4783 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
4784}
4785
4786/** Opcode 0x0f 0x35. */
4787FNIEMOP_DEF(iemOp_sysexit)
4788{
4789 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4791 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4792}
4793
4794/** Opcode 0x0f 0x37. */
4795FNIEMOP_STUB(iemOp_getsec);
4796
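/*
 * Note: The three-byte escape tables below are indexed by the third opcode
 *       byte times four plus the mandatory-prefix index, i.e. each
 *       0f38/0f3a opcode has one table entry per prefix variant
 *       (none/0x66/0xf3/0xf2).
 */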
4797
4798/** Opcode 0x0f 0x38. */
4799FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4800{
4801#ifdef IEM_WITH_THREE_0F_38
4802 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4803 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4804#else
4805 IEMOP_BITCH_ABOUT_STUB();
4806 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4807#endif
4808}
4809
4810
4811/** Opcode 0x0f 0x3a. */
4812FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4813{
4814#ifdef IEM_WITH_THREE_0F_3A
4815 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4816 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4817#else
4818 IEMOP_BITCH_ABOUT_STUB();
4819 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4820#endif
4821}
4822
4823
4824/**
4825 * Implements a conditional move.
4826 *
4827 * Wish there were an obvious way to do this where we could share code and
4828 * reduce bloat.
4829 *
4830 * @param a_Cnd The conditional "microcode" operation.
4831 */
4832#define CMOV_X(a_Cnd) \
4833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4834 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4835 { \
4836 switch (pVCpu->iem.s.enmEffOpSize) \
4837 { \
4838 case IEMMODE_16BIT: \
4839 IEM_MC_BEGIN(0, 1); \
4840 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4841 a_Cnd { \
4842 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4843 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4844 } IEM_MC_ENDIF(); \
4845 IEM_MC_ADVANCE_RIP(); \
4846 IEM_MC_END(); \
4847 return VINF_SUCCESS; \
4848 \
4849 case IEMMODE_32BIT: \
4850 IEM_MC_BEGIN(0, 1); \
4851 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4852 a_Cnd { \
4853 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4854 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4855 } IEM_MC_ELSE() { \
4856 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4857 } IEM_MC_ENDIF(); \
4858 IEM_MC_ADVANCE_RIP(); \
4859 IEM_MC_END(); \
4860 return VINF_SUCCESS; \
4861 \
4862 case IEMMODE_64BIT: \
4863 IEM_MC_BEGIN(0, 1); \
4864 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4865 a_Cnd { \
4866 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4867 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4868 } IEM_MC_ENDIF(); \
4869 IEM_MC_ADVANCE_RIP(); \
4870 IEM_MC_END(); \
4871 return VINF_SUCCESS; \
4872 \
4873 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4874 } \
4875 } \
4876 else \
4877 { \
4878 switch (pVCpu->iem.s.enmEffOpSize) \
4879 { \
4880 case IEMMODE_16BIT: \
4881 IEM_MC_BEGIN(0, 2); \
4882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4883 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4885 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4886 a_Cnd { \
4887 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4888 } IEM_MC_ENDIF(); \
4889 IEM_MC_ADVANCE_RIP(); \
4890 IEM_MC_END(); \
4891 return VINF_SUCCESS; \
4892 \
4893 case IEMMODE_32BIT: \
4894 IEM_MC_BEGIN(0, 2); \
4895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4896 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4898 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4899 a_Cnd { \
4900 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4901 } IEM_MC_ELSE() { \
4902 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4903 } IEM_MC_ENDIF(); \
4904 IEM_MC_ADVANCE_RIP(); \
4905 IEM_MC_END(); \
4906 return VINF_SUCCESS; \
4907 \
4908 case IEMMODE_64BIT: \
4909 IEM_MC_BEGIN(0, 2); \
4910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4911 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4913 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4914 a_Cnd { \
4915 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4916 } IEM_MC_ENDIF(); \
4917 IEM_MC_ADVANCE_RIP(); \
4918 IEM_MC_END(); \
4919 return VINF_SUCCESS; \
4920 \
4921 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4922 } \
4923 } do {} while (0)
4924
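/*
 * Note: In 64-bit mode a CMOVcc with a 32-bit operand size always zero
 *       extends the destination register, even when the condition is
 *       false.  E.g. with RAX = 0xffffffff12345678 and ZF clear,
 *       "cmove eax, ecx" still leaves RAX = 0x0000000012345678.  That is
 *       why only the 32-bit cases above have an IEM_MC_ELSE() arm clearing
 *       the high half of the destination.
 */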
4925
4926
4927/** Opcode 0x0f 0x40. */
4928FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
4929{
4930 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
4931 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
4932}
4933
4934
4935/** Opcode 0x0f 0x41. */
4936FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
4937{
4938 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
4939 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
4940}
4941
4942
4943/** Opcode 0x0f 0x42. */
4944FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
4945{
4946 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
4947 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
4948}
4949
4950
4951/** Opcode 0x0f 0x43. */
4952FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
4953{
4954 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
4955 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
4956}
4957
4958
4959/** Opcode 0x0f 0x44. */
4960FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
4961{
4962 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
4963 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
4964}
4965
4966
4967/** Opcode 0x0f 0x45. */
4968FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
4969{
4970 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
4971 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
4972}
4973
4974
4975/** Opcode 0x0f 0x46. */
4976FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
4977{
4978 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
4979 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
4980}
4981
4982
4983/** Opcode 0x0f 0x47. */
4984FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
4985{
4986 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
4987 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
4988}
4989
4990
4991/** Opcode 0x0f 0x48. */
4992FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
4993{
4994 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
4995 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
4996}
4997
4998
4999/** Opcode 0x0f 0x49. */
5000FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5001{
5002 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5003 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5004}
5005
5006
5007/** Opcode 0x0f 0x4a. */
5008FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5009{
5010 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5011 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5012}
5013
5014
5015/** Opcode 0x0f 0x4b. */
5016FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5017{
5018 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5019 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5020}
5021
5022
5023/** Opcode 0x0f 0x4c. */
5024FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5025{
5026 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5027 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5028}
5029
5030
5031/** Opcode 0x0f 0x4d. */
5032FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5033{
5034 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5035 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5036}
5037
5038
5039/** Opcode 0x0f 0x4e. */
5040FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5041{
5042 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5043 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5044}
5045
5046
5047/** Opcode 0x0f 0x4f. */
5048FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5049{
5050 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5051 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5052}
5053
5054#undef CMOV_X
5055
5056/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5057FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5058{
5059 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5061 if (IEM_IS_MODRM_REG_MODE(bRm))
5062 {
5063 /*
5064 * Register, register.
5065 */
5066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5067 IEM_MC_BEGIN(2, 1);
5068 IEM_MC_LOCAL(uint8_t, u8Dst);
5069 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5070 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5071 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5072 IEM_MC_PREPARE_SSE_USAGE();
5073 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5074 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5075 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5076 IEM_MC_ADVANCE_RIP();
5077 IEM_MC_END();
5078 return VINF_SUCCESS;
5079 }
5080
5081 /* No memory operand. */
5082 return IEMOP_RAISE_INVALID_OPCODE();
5083}
5084
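/*
 * Note: movmskps gathers the sign bit of each of the four packed singles
 *       into bits 3:0 of the destination; u8Dst therefore holds the whole
 *       result and the U32 store zero extends it (clearing the upper half
 *       of a 64-bit destination as well).
 */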
5085
5086/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5087FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5088{
5089 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5091 if (IEM_IS_MODRM_REG_MODE(bRm))
5092 {
5093 /*
5094 * Register, register.
5095 */
5096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5097 IEM_MC_BEGIN(2, 1);
5098 IEM_MC_LOCAL(uint8_t, u8Dst);
5099 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5100 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5101 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5102 IEM_MC_PREPARE_SSE_USAGE();
5103 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5104 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5105 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5106 IEM_MC_ADVANCE_RIP();
5107 IEM_MC_END();
5108 return VINF_SUCCESS;
5109 }
5110
5111 /* No memory operand. */
5112 return IEMOP_RAISE_INVALID_OPCODE();
5113
5114}
5115
5116
5117/* Opcode 0xf3 0x0f 0x50 - invalid */
5118/* Opcode 0xf2 0x0f 0x50 - invalid */
5119
5120
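/*
 * Note: As the worker names suggest, the *_FullFull_To_Full helpers below
 *       operate on all packed elements, while the *_FullR32_To_Full /
 *       *_FullR64_To_Full helpers only replace the low scalar element and
 *       pass the upper part of the destination through, matching the
 *       ss/sd variants.
 */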
5121/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5122FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5123{
5124 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5125 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5126}
5127
5128
5129/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5130FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5131{
5132 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5133 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5134}
5135
5136
5137/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5138FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5139{
5140 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5141 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5142}
5143
5144
5145/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5146FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5147{
5148 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5149 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5150}
5151
5152
5153/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5154FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
5155/* Opcode 0x66 0x0f 0x52 - invalid */
5156/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5157FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
5158/* Opcode 0xf2 0x0f 0x52 - invalid */
5159
5160/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5161FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5162/* Opcode 0x66 0x0f 0x53 - invalid */
5163/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5164FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5165/* Opcode 0xf2 0x0f 0x53 - invalid */
5166
5167
5168/** Opcode 0x0f 0x54 - andps Vps, Wps */
5169FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5170{
5171 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5172 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5173}
5174
5175
5176/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5177FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5178{
5179 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5180 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5181}
5182
5183
5184/* Opcode 0xf3 0x0f 0x54 - invalid */
5185/* Opcode 0xf2 0x0f 0x54 - invalid */
5186
5187
5188/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5189FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5190{
5191 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5192 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5193}
5194
5195
5196/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5197FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5198{
5199 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5200 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5201}
5202
5203
5204/* Opcode 0xf3 0x0f 0x55 - invalid */
5205/* Opcode 0xf2 0x0f 0x55 - invalid */
5206
5207
5208/** Opcode 0x0f 0x56 - orps Vps, Wps */
5209FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5210{
5211 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5212 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5213}
5214
5215
5216/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5217FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5218{
5219 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5220 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5221}
5222
5223
5224/* Opcode 0xf3 0x0f 0x56 - invalid */
5225/* Opcode 0xf2 0x0f 0x56 - invalid */
5226
5227
5228/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5229FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5230{
5231 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5232 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5233}
5234
5235
5236/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5237FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5238{
5239 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5240 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5241}
5242
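/*
 * Note: andps/andpd, andnps/andnpd, orps/orpd and xorps/xorpd all reuse
 *       the integer pand/pandn/por/pxor workers, since bitwise operations
 *       do not care about the element type.  As the worker name suggests,
 *       this applies the SSE2 variant of the feature checks to the SSE1
 *       forms as well.
 */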
5243
5244/* Opcode 0xf3 0x0f 0x57 - invalid */
5245/* Opcode 0xf2 0x0f 0x57 - invalid */
5246
5247/** Opcode 0x0f 0x58 - addps Vps, Wps */
5248FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5249{
5250 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5251 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5252}
5253
5254
5255/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5256FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5257{
5258 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5259 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5260}
5261
5262
5263/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5264FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5265{
5266 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5267 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5268}
5269
5270
5271/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5272FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5273{
5274 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5275 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5276}
5277
5278
5279/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5280FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5281{
5282 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5283 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5284}
5285
5286
5287/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5288FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5289{
5290 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5291 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5292}
5293
5294
5295/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5296FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5297{
5298 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5299 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5300}
5301
5302
5303/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5304FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5305{
5306 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5307 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5308}
5309
5310
5311/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5312FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5313{
5314 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5315 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5316}
5317
5318
5319/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5320FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5321{
5322 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5323 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5324}
5325
5326
5327/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5328FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5329{
5330 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5331 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5332}
5333
5334
5335/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5336FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5337{
5338 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5339 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5340}
5341
5342
5343/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5344FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5345{
5346 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5347 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5348}
5349
5350
5351/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5352FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5353{
5354 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5355 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5356}
5357
5358
5359/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5360FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5361{
5362 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5363 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5364}
5365
5366
5367/* Opcode 0xf2 0x0f 0x5b - invalid */
5368
5369
5370/** Opcode 0x0f 0x5c - subps Vps, Wps */
5371FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5372{
5373 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5374 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5375}
5376
5377
5378/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5379FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5380{
5381 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5382 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5383}
5384
5385
5386/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5387FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5388{
5389 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5390 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5391}
5392
5393
5394/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5395FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5396{
5397 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5398 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5399}
5400
5401
5402/** Opcode 0x0f 0x5d - minps Vps, Wps */
5403FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5404{
5405 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5406 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5407}
5408
5409
5410/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5411FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5412{
5413 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5414 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5415}
5416
5417
5418/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5419FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5420{
5421 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5422 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5423}
5424
5425
5426/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5427FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5428{
5429 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5430 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5431}
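/*
 * Note on semantics (a sketch, not the IEM worker): minps/minss and friends
 * are not commutative.  When either operand is NaN, or when the operands are
 * zeros of differing sign, the second (source) operand is returned, which is
 * exactly what the plain C comparison below yields; MXCSR exception flags
 * are ignored here:
 *
 *     static float MinssSketch(float r32Dst, float r32Src)
 *     {
 *         return r32Dst < r32Src ? r32Dst : r32Src; // NaN compares false -> src wins
 *     }
 */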
5432
5433
5434/** Opcode 0x0f 0x5e - divps Vps, Wps */
5435FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5436{
5437 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5438 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5439}
5440
5441
5442/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5443FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5444{
5445 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5446 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5447}
5448
5449
5450/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5451FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5452{
5453 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5454 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5455}
5456
5457
5458/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5459FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5460{
5461 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5462 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5463}
5464
5465
5466/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5467FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5468{
5469 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5470 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5471}
5472
5473
5474/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5475FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5476{
5477 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5478 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5479}
5480
5481
5482/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5483FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5484{
5485 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5486 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5487}
5488
5489
5490/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5491FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5492{
5493 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5494 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5495}
5496
5497
5498/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5499FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5500{
5501 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5502 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5503}
5504
5505
5506/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5507FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5508{
5509 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5510 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5511}
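/*
 * Illustration (documentation only): punpcklbw interleaves the low bytes of
 * destination and source.  A self-contained sketch of the 64-bit MMX form;
 * the name is illustrative, not the IEM worker:
 *
 *     #include <stdint.h>
 *     #include <string.h>
 *     static void PunpcklbwU64Sketch(uint8_t abDst[8], uint8_t const abSrc[8])
 *     {
 *         uint8_t abRes[8];
 *         for (unsigned i = 0; i < 4; i++)
 *         {
 *             abRes[i * 2]     = abDst[i];   // low destination byte ...
 *             abRes[i * 2 + 1] = abSrc[i];   // ... interleaved with low source byte
 *         }
 *         memcpy(abDst, abRes, sizeof(abRes));
 *     }
 */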
5512
5513
5514/* Opcode 0xf3 0x0f 0x60 - invalid */
5515
5516
5517/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5518FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5519{
5520 /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
5521 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5522 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5523}
5524
5525
5526/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5527FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5528{
5529 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5530 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5531}
5532
5533
5534/* Opcode 0xf3 0x0f 0x61 - invalid */
5535
5536
5537/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5538FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5539{
5540 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5541 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5542}
5543
5544
5545/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5546FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5547{
5548 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5549 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5550}
5551
5552
5553/* Opcode 0xf3 0x0f 0x62 - invalid */
5554
5555
5557/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5558FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5559{
5560 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5561 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5562}
5563
5564
5565/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5566FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5567{
5568 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5569 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5570}
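/*
 * Illustration (documentation only): packsswb narrows signed words to bytes
 * with signed saturation; the MMX form packs four words from the destination
 * followed by four from the source.  Sketch of the per-element saturation:
 *
 *     #include <stdint.h>
 *     static int8_t SatI16ToI8Sketch(int16_t i16)
 *     {
 *         return i16 > INT8_MAX ? INT8_MAX
 *              : i16 < INT8_MIN ? INT8_MIN : (int8_t)i16;
 *     }
 */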
5571
5572
5573/* Opcode 0xf3 0x0f 0x63 - invalid */
5574
5575
5576/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5577FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5578{
5579 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5580 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5581}
5582
5583
5584/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5585FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5586{
5587 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5588 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5589}
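/*
 * Illustration (documentation only): the pcmpgtX family produces all-ones or
 * all-zero element masks from signed compares.  Byte-sized sketch with an
 * illustrative name:
 *
 *     #include <stdint.h>
 *     static void PcmpgtbU64Sketch(int8_t abDst[8], int8_t const abSrc[8])
 *     {
 *         for (unsigned i = 0; i < 8; i++)
 *             abDst[i] = abDst[i] > abSrc[i] ? (int8_t)-1 : 0; // 0xff or 0x00 per byte
 *     }
 */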
5590
5591
5592/* Opcode 0xf3 0x0f 0x64 - invalid */
5593
5594
5595/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5596FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5597{
5598 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5599 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5600}
5601
5602
5603/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5604FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5605{
5606 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5607 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5608}
5609
5610
5611/* Opcode 0xf3 0x0f 0x65 - invalid */
5612
5613
5614/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5615FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5616{
5617 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5618 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5619}
5620
5621
5622/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5623FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5624{
5625 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5626 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5627}
5628
5629
5630/* Opcode 0xf3 0x0f 0x66 - invalid */
5631
5632
5633/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5634FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5635{
5636 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5637 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5638}
5639
5640
5641/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5642FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5643{
5644 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5645 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
5646}
5647
5648
5649/* Opcode 0xf3 0x0f 0x67 - invalid */
5650
5651
5652/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5653 * @note Intel and AMD both use Qd for the second parameter; however, they
5654 * both list it as an mmX/mem64 operand and Intel describes it as being
5655 * loaded as a qword, so it should be Qq, shouldn't it? */
5656FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5657{
5658 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5659 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5660}
5661
5662
5663/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5664FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5665{
5666 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5667 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5668}
5669
5670
5671/* Opcode 0xf3 0x0f 0x68 - invalid */
5672
5673
5674/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5675 * @note Intel and AMD both use Qd for the second parameter; however, they
5676 * both list it as an mmX/mem64 operand and Intel describes it as being
5677 * loaded as a qword, so it should be Qq, shouldn't it? */
5678FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5679{
5680 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5681 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5682}
5683
5684
5685/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5686FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5687{
5688 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5689 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5691}
5692
5693
5694/* Opcode 0xf3 0x0f 0x69 - invalid */
5695
5696
5697/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5698 * @note Intel and AMD both use Qd for the second parameter; however, they
5699 * both list it as an mmX/mem64 operand and Intel describes it as being
5700 * loaded as a qword, so it should be Qq, shouldn't it? */
5701FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5702{
5703 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5704 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5705}
5706
5707
5708/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5709FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5710{
5711 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5712 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5713}
5714
5715
5716/* Opcode 0xf3 0x0f 0x6a - invalid */
5717
5718
5719/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5720FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5721{
5722 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5723 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5724}
5725
5726
5727/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5728FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5729{
5730 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5731 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5732}
5733
5734
5735/* Opcode 0xf3 0x0f 0x6b - invalid */
5736
5737
5738/* Opcode 0x0f 0x6c - invalid */
5739
5740
5741/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5742FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5743{
5744 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5745 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5746}
5747
5748
5749/* Opcode 0xf3 0x0f 0x6c - invalid */
5750/* Opcode 0xf2 0x0f 0x6c - invalid */
5751
5752
5753/* Opcode 0x0f 0x6d - invalid */
5754
5755
5756/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5757FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5758{
5759 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5760 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5761}
5762
5763
5764/* Opcode 0xf3 0x0f 0x6d - invalid */
5765
5766
5767FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
5768{
5769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5770 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5771 {
5772 /**
5773 * @opcode 0x6e
5774 * @opcodesub rex.w=1
5775 * @oppfx none
5776 * @opcpuid mmx
5777 * @opgroup og_mmx_datamove
5778 * @opxcpttype 5
5779 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5780 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5781 */
5782 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5783 if (IEM_IS_MODRM_REG_MODE(bRm))
5784 {
5785 /* MMX, greg64 */
5786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5787 IEM_MC_BEGIN(0, 1);
5788 IEM_MC_LOCAL(uint64_t, u64Tmp);
5789
5790 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5791 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5792 IEM_MC_FPU_TO_MMX_MODE();
5793
5794 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5795 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5796
5797 IEM_MC_ADVANCE_RIP();
5798 IEM_MC_END();
5799 }
5800 else
5801 {
5802 /* MMX, [mem64] */
5803 IEM_MC_BEGIN(0, 2);
5804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5805 IEM_MC_LOCAL(uint64_t, u64Tmp);
5806
5807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5809 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5810 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5811 IEM_MC_FPU_TO_MMX_MODE();
5812
5813 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5814 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5815
5816 IEM_MC_ADVANCE_RIP();
5817 IEM_MC_END();
5818 }
5819 }
5820 else
5821 {
5822 /**
5823 * @opdone
5824 * @opcode 0x6e
5825 * @opcodesub rex.w=0
5826 * @oppfx none
5827 * @opcpuid mmx
5828 * @opgroup og_mmx_datamove
5829 * @opxcpttype 5
5830 * @opfunction iemOp_movd_q_Pd_Ey
5831 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5832 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5833 */
5834 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5835 if (IEM_IS_MODRM_REG_MODE(bRm))
5836 {
5837 /* MMX, greg */
5838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5839 IEM_MC_BEGIN(0, 1);
5840 IEM_MC_LOCAL(uint64_t, u64Tmp);
5841
5842 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5843 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5844 IEM_MC_FPU_TO_MMX_MODE();
5845
5846 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5847 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5848
5849 IEM_MC_ADVANCE_RIP();
5850 IEM_MC_END();
5851 }
5852 else
5853 {
5854 /* MMX, [mem] */
5855 IEM_MC_BEGIN(0, 2);
5856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5857 IEM_MC_LOCAL(uint32_t, u32Tmp);
5858
5859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5861 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5862 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5863 IEM_MC_FPU_TO_MMX_MODE();
5864
5865 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5866 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
5867
5868 IEM_MC_ADVANCE_RIP();
5869 IEM_MC_END();
5870 }
5871 }
5872 return VINF_SUCCESS;
5873}
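/*
 * Illustration (documentation only): the rex.w=0 path above stores the
 * 32-bit value zero extended to the full 64-bit MMX register, which is what
 * IEM_MC_STORE_MREG_U32_ZX_U64 expresses.  Sketch, with fRexW assumed
 * already decoded:
 *
 *     #include <stdint.h>
 *     static uint64_t MovdqToMmSketch(int fRexW, uint64_t u64Src)
 *     {
 *         return fRexW ? u64Src : (uint64_t)(uint32_t)u64Src; // high half zeroed
 *     }
 */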
5874
5875FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
5876{
5877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5878 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5879 {
5880 /**
5881 * @opcode 0x6e
5882 * @opcodesub rex.w=1
5883 * @oppfx 0x66
5884 * @opcpuid sse2
5885 * @opgroup og_sse2_simdint_datamove
5886 * @opxcpttype 5
5887 * @optest 64-bit / op1=1 op2=2 -> op1=2
5888 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5889 */
5890 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5891 if (IEM_IS_MODRM_REG_MODE(bRm))
5892 {
5893 /* XMM, greg64 */
5894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5895 IEM_MC_BEGIN(0, 1);
5896 IEM_MC_LOCAL(uint64_t, u64Tmp);
5897
5898 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5899 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5900
5901 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5902 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
5903
5904 IEM_MC_ADVANCE_RIP();
5905 IEM_MC_END();
5906 }
5907 else
5908 {
5909 /* XMM, [mem64] */
5910 IEM_MC_BEGIN(0, 2);
5911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5912 IEM_MC_LOCAL(uint64_t, u64Tmp);
5913
5914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5916 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5917 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5918
5919 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5920 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
5921
5922 IEM_MC_ADVANCE_RIP();
5923 IEM_MC_END();
5924 }
5925 }
5926 else
5927 {
5928 /**
5929 * @opdone
5930 * @opcode 0x6e
5931 * @opcodesub rex.w=0
5932 * @oppfx 0x66
5933 * @opcpuid sse2
5934 * @opgroup og_sse2_simdint_datamove
5935 * @opxcpttype 5
5936 * @opfunction iemOp_movd_q_Vy_Ey
5937 * @optest op1=1 op2=2 -> op1=2
5938 * @optest op1=0 op2=-42 -> op1=-42
5939 */
5940 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5941 if (IEM_IS_MODRM_REG_MODE(bRm))
5942 {
5943 /* XMM, greg32 */
5944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5945 IEM_MC_BEGIN(0, 1);
5946 IEM_MC_LOCAL(uint32_t, u32Tmp);
5947
5948 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5949 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5950
5951 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5952 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
5953
5954 IEM_MC_ADVANCE_RIP();
5955 IEM_MC_END();
5956 }
5957 else
5958 {
5959 /* XMM, [mem32] */
5960 IEM_MC_BEGIN(0, 2);
5961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5962 IEM_MC_LOCAL(uint32_t, u32Tmp);
5963
5964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5966 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5967 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5968
5969 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5970 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
5971
5972 IEM_MC_ADVANCE_RIP();
5973 IEM_MC_END();
5974 }
5975 }
5976 return VINF_SUCCESS;
5977}
5978
5979/* Opcode 0xf3 0x0f 0x6e - invalid */
5980
5981
5982/**
5983 * @opcode 0x6f
5984 * @oppfx none
5985 * @opcpuid mmx
5986 * @opgroup og_mmx_datamove
5987 * @opxcpttype 5
5988 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5989 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5990 */
5991FNIEMOP_DEF(iemOp_movq_Pq_Qq)
5992{
5993 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5994 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5995 if (IEM_IS_MODRM_REG_MODE(bRm))
5996 {
5997 /*
5998 * Register, register.
5999 */
6000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6001 IEM_MC_BEGIN(0, 1);
6002 IEM_MC_LOCAL(uint64_t, u64Tmp);
6003
6004 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6005 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6006 IEM_MC_FPU_TO_MMX_MODE();
6007
6008 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6009 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6010
6011 IEM_MC_ADVANCE_RIP();
6012 IEM_MC_END();
6013 }
6014 else
6015 {
6016 /*
6017 * Register, memory.
6018 */
6019 IEM_MC_BEGIN(0, 2);
6020 IEM_MC_LOCAL(uint64_t, u64Tmp);
6021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6022
6023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6025 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6026 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6027 IEM_MC_FPU_TO_MMX_MODE();
6028
6029 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6030 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6031
6032 IEM_MC_ADVANCE_RIP();
6033 IEM_MC_END();
6034 }
6035 return VINF_SUCCESS;
6036}
6037
6038/**
6039 * @opcode 0x6f
6040 * @oppfx 0x66
6041 * @opcpuid sse2
6042 * @opgroup og_sse2_simdint_datamove
6043 * @opxcpttype 1
6044 * @optest op1=1 op2=2 -> op1=2
6045 * @optest op1=0 op2=-42 -> op1=-42
6046 */
6047FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6048{
6049 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6051 if (IEM_IS_MODRM_REG_MODE(bRm))
6052 {
6053 /*
6054 * Register, register.
6055 */
6056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6057 IEM_MC_BEGIN(0, 0);
6058
6059 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6060 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6061
6062 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6063 IEM_GET_MODRM_RM(pVCpu, bRm));
6064 IEM_MC_ADVANCE_RIP();
6065 IEM_MC_END();
6066 }
6067 else
6068 {
6069 /*
6070 * Register, memory.
6071 */
6072 IEM_MC_BEGIN(0, 2);
6073 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6075
6076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6078 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6079 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6080
6081 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6082 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6083
6084 IEM_MC_ADVANCE_RIP();
6085 IEM_MC_END();
6086 }
6087 return VINF_SUCCESS;
6088}
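/*
 * Note (documentation only): IEM_MC_FETCH_MEM_U128_ALIGN_SSE above is what
 * distinguishes movdqa from the movdqu form below - a misaligned effective
 * address raises #GP(0) for the aligned variant.  Sketch of the check:
 *
 *     #include <stdint.h>
 *     static int IsSse16ByteAlignedSketch(uint64_t GCPtrEff)
 *     {
 *         return (GCPtrEff & 15) == 0; // else movdqa takes #GP(0)
 *     }
 */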
6089
6090/**
6091 * @opcode 0x6f
6092 * @oppfx 0xf3
6093 * @opcpuid sse2
6094 * @opgroup og_sse2_simdint_datamove
6095 * @opxcpttype 4UA
6096 * @optest op1=1 op2=2 -> op1=2
6097 * @optest op1=0 op2=-42 -> op1=-42
6098 */
6099FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6100{
6101 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6103 if (IEM_IS_MODRM_REG_MODE(bRm))
6104 {
6105 /*
6106 * Register, register.
6107 */
6108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6109 IEM_MC_BEGIN(0, 0);
6110 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6111 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6112 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6113 IEM_GET_MODRM_RM(pVCpu, bRm));
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 }
6117 else
6118 {
6119 /*
6120 * Register, memory.
6121 */
6122 IEM_MC_BEGIN(0, 2);
6123 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6125
6126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6128 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6129 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6130 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6131 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6132
6133 IEM_MC_ADVANCE_RIP();
6134 IEM_MC_END();
6135 }
6136 return VINF_SUCCESS;
6137}
6138
6139
6140/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6141FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6142{
6143 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6145 if (IEM_IS_MODRM_REG_MODE(bRm))
6146 {
6147 /*
6148 * Register, register.
6149 */
6150 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6152
6153 IEM_MC_BEGIN(3, 0);
6154 IEM_MC_ARG(uint64_t *, pDst, 0);
6155 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6156 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6157 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6158 IEM_MC_PREPARE_FPU_USAGE();
6159 IEM_MC_FPU_TO_MMX_MODE();
6160
6161 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6162 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6164 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6165
6166 IEM_MC_ADVANCE_RIP();
6167 IEM_MC_END();
6168 }
6169 else
6170 {
6171 /*
6172 * Register, memory.
6173 */
6174 IEM_MC_BEGIN(3, 2);
6175 IEM_MC_ARG(uint64_t *, pDst, 0);
6176 IEM_MC_LOCAL(uint64_t, uSrc);
6177 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6179
6180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6181 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6182 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6184 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6185 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6186
6187 IEM_MC_PREPARE_FPU_USAGE();
6188 IEM_MC_FPU_TO_MMX_MODE();
6189
6190 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6191 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6192 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6193
6194 IEM_MC_ADVANCE_RIP();
6195 IEM_MC_END();
6196 }
6197 return VINF_SUCCESS;
6198}
6199
6200
6201/**
6202 * Common worker for SSE2 instructions of the form:
6203 * pshufd xmm1, xmm2/mem128, imm8
6204 * pshufhw xmm1, xmm2/mem128, imm8
6205 * pshuflw xmm1, xmm2/mem128, imm8
6206 *
6207 * Proper alignment of the 128-bit operand is enforced.
6208 * Exceptions type 4. SSE2 cpuid checks.
6209 */
6210FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6211{
6212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6213 if (IEM_IS_MODRM_REG_MODE(bRm))
6214 {
6215 /*
6216 * Register, register.
6217 */
6218 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6220
6221 IEM_MC_BEGIN(3, 0);
6222 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6223 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6224 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6225 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6226 IEM_MC_PREPARE_SSE_USAGE();
6227 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6228 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6229 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6230 IEM_MC_ADVANCE_RIP();
6231 IEM_MC_END();
6232 }
6233 else
6234 {
6235 /*
6236 * Register, memory.
6237 */
6238 IEM_MC_BEGIN(3, 2);
6239 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6240 IEM_MC_LOCAL(RTUINT128U, uSrc);
6241 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6243
6244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6245 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6246 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6248 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6249
6250 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6251 IEM_MC_PREPARE_SSE_USAGE();
6252 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6253 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6254
6255 IEM_MC_ADVANCE_RIP();
6256 IEM_MC_END();
6257 }
6258 return VINF_SUCCESS;
6259}
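/*
 * Illustration (documentation only): how the imm8 drives the dword shuffle
 * in pshufd - each destination dword takes two selector bits (pshufhw and
 * pshuflw apply the same scheme to the high/low four words only).  Sketch
 * with an illustrative name:
 *
 *     #include <stdint.h>
 *     static void PshufdSketch(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
 *     {
 *         for (unsigned i = 0; i < 4; i++)
 *             au32Dst[i] = au32Src[(bImm >> (i * 2)) & 3]; // 2 imm8 bits per dword
 *     }
 */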
6260
6261
6262/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6263FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6264{
6265 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6266 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6267}
6268
6269
6270/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6271FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6272{
6273 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6274 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6275}
6276
6277
6278/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6279FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6280{
6281 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6282 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6283}
6284
6285
6286/**
6287 * Common worker for MMX instructions of the form:
6288 * psrlw mm, imm8
6289 * psraw mm, imm8
6290 * psllw mm, imm8
6291 * psrld mm, imm8
6292 * psrad mm, imm8
6293 * pslld mm, imm8
6294 * psrlq mm, imm8
6295 * psllq mm, imm8
6296 *
6297 */
6298FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6299{
6300 if (IEM_IS_MODRM_REG_MODE(bRm))
6301 {
6302 /*
6303 * Register, immediate.
6304 */
6305 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6307
6308 IEM_MC_BEGIN(2, 0);
6309 IEM_MC_ARG(uint64_t *, pDst, 0);
6310 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6311 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6312 IEM_MC_PREPARE_FPU_USAGE();
6313 IEM_MC_FPU_TO_MMX_MODE();
6314
6315 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6316 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6317 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6318
6319 IEM_MC_ADVANCE_RIP();
6320 IEM_MC_END();
6321 }
6322 else
6323 {
6324 /*
6325 * Register, memory not supported.
6326 */
6327 /// @todo Caller already enforced register mode?!
6328 }
6329 return VINF_SUCCESS;
6330}
6331
6332
6333/**
6334 * Common worker for SSE2 instructions of the form:
6335 * psrlw xmm, imm8
6336 * psraw xmm, imm8
6337 * psllw xmm, imm8
6338 * psrld xmm, imm8
6339 * psrad xmm, imm8
6340 * pslld xmm, imm8
6341 * psrlq xmm, imm8
6342 * psllq xmm, imm8
6343 *
6344 */
6345FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6346{
6347 if (IEM_IS_MODRM_REG_MODE(bRm))
6348 {
6349 /*
6350 * Register, immediate.
6351 */
6352 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6354
6355 IEM_MC_BEGIN(2, 0);
6356 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6357 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6358 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6359 IEM_MC_PREPARE_SSE_USAGE();
6360 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6361 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6362 IEM_MC_ADVANCE_RIP();
6363 IEM_MC_END();
6364 }
6365 else
6366 {
6367 /*
6368 * Register, memory not supported.
6369 */
6370 /// @todo Caller already enforced register mode?!
6371 }
6372 return VINF_SUCCESS;
6373}
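/*
 * Note (documentation only): for these immediate shifts an out-of-range
 * count does not wrap; psrlw/psllw style shifts zero the element once the
 * count exceeds the element width, while psraw/psrad fill with the sign bit
 * instead.  Word-sized logical-right sketch:
 *
 *     #include <stdint.h>
 *     static uint16_t PsrlwImmSketch(uint16_t u16, uint8_t bImm)
 *     {
 *         return bImm > 15 ? 0 : (uint16_t)(u16 >> bImm); // count > 15 -> 0
 *     }
 */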
6374
6375
6376/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6377FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6378{
6379// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6380 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6381}
6382
6383
6384/** Opcode 0x66 0x0f 0x71 11/2. */
6385FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6386{
6387// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6388 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6389}
6390
6391
6392/** Opcode 0x0f 0x71 11/4. */
6393FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6394{
6395// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6396 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6397}
6398
6399
6400/** Opcode 0x66 0x0f 0x71 11/4. */
6401FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6402{
6403// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6404 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6405}
6406
6407
6408/** Opcode 0x0f 0x71 11/6. */
6409FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6410{
6411// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6412 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6413}
6414
6415
6416/** Opcode 0x66 0x0f 0x71 11/6. */
6417FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6418{
6419// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6420 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6421}
6422
6423
6424/**
6425 * Group 12 jump table for register variant.
6426 */
6427IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6428{
6429 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6430 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6431 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6432 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6433 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6434 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6435 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6436 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6437};
6438AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6439
6440
6441/** Opcode 0x0f 0x71. */
6442FNIEMOP_DEF(iemOp_Grp12)
6443{
6444 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6445 if (IEM_IS_MODRM_REG_MODE(bRm))
6446 /* register, register */
6447 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6448 + pVCpu->iem.s.idxPrefix], bRm);
6449 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6450}
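/*
 * Note (documentation only): the group tables above have four columns per
 * /r value, one per mandatory prefix, hence the reg * 4 + idxPrefix lookup.
 * The column order assumed here matches the table rows: none, 0x66, 0xf3,
 * 0xf2.  Sketch of the index computation:
 *
 *     unsigned const idx = ((bRm >> 3) & 7) * 4 + idxPrefix; // /r times 4 plus prefix column
 */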
6451
6452
6453/** Opcode 0x0f 0x72 11/2. */
6454FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6455{
6456// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6457 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6458}
6459
6460
6461/** Opcode 0x66 0x0f 0x72 11/2. */
6462FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6463{
6464// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6465 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6466}
6467
6468
6469/** Opcode 0x0f 0x72 11/4. */
6470FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6471{
6472// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6473 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6474}
6475
6476
6477/** Opcode 0x66 0x0f 0x72 11/4. */
6478FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6479{
6480// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6481 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6482}
6483
6484
6485/** Opcode 0x0f 0x72 11/6. */
6486FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6487{
6488// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6489 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6490}
6491
6492/** Opcode 0x66 0x0f 0x72 11/6. */
6493FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6494{
6495// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6496 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6497}
6498
6499
6500/**
6501 * Group 13 jump table for register variant.
6502 */
6503IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6504{
6505 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6506 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6507 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6508 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6509 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6510 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6511 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6512 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6513};
6514AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6515
6516/** Opcode 0x0f 0x72. */
6517FNIEMOP_DEF(iemOp_Grp13)
6518{
6519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6520 if (IEM_IS_MODRM_REG_MODE(bRm))
6521 /* register, register */
6522 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6523 + pVCpu->iem.s.idxPrefix], bRm);
6524 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6525}
6526
6527
6528/** Opcode 0x0f 0x73 11/2. */
6529FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6530{
6531// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6532 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6533}
6534
6535
6536/** Opcode 0x66 0x0f 0x73 11/2. */
6537FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6538{
6539// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6540 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6541}
6542
6543
6544/** Opcode 0x66 0x0f 0x73 11/3. */
6545FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6546{
6547// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6548 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6549}
6550
6551
6552/** Opcode 0x0f 0x73 11/6. */
6553FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6554{
6555// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6556 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6557}
6558
6559
6560/** Opcode 0x66 0x0f 0x73 11/6. */
6561FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6562{
6563// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6564 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6565}
6566
6567
6568/** Opcode 0x66 0x0f 0x73 11/7. */
6569FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6570{
6571// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6572 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6573}
6574
6575/**
6576 * Group 14 jump table for register variant.
6577 */
6578IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6579{
6580 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6581 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6582 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6583 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6584 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6585 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6586 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6587 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6588};
6589AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6590
6591
6592/** Opcode 0x0f 0x73. */
6593FNIEMOP_DEF(iemOp_Grp14)
6594{
6595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6596 if (IEM_IS_MODRM_REG_MODE(bRm))
6597 /* register, register */
6598 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6599 + pVCpu->iem.s.idxPrefix], bRm);
6600 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6601}
6602
6603
6604/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6605FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6606{
6607 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6608 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6609}
6610
6611
6612/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6613FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6614{
6615 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6616 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
6617}
6618
6619
6620/* Opcode 0xf3 0x0f 0x74 - invalid */
6621/* Opcode 0xf2 0x0f 0x74 - invalid */
6622
6623
6624/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6625FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6626{
6627 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6628 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6629}
6630
6631
6632/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6633FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6634{
6635 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6636 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
6637}
6638
6639
6640/* Opcode 0xf3 0x0f 0x75 - invalid */
6641/* Opcode 0xf2 0x0f 0x75 - invalid */
6642
6643
6644/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6645FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6646{
6647 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6648 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6649}
6650
6651
6652/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6653FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6654{
6655 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6656 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
6657}
6658
6659
6660/* Opcode 0xf3 0x0f 0x76 - invalid */
6661/* Opcode 0xf2 0x0f 0x76 - invalid */
6662
6663
6664/** Opcode 0x0f 0x77 - emms (VEX has vzeroall and vzeroupper here) */
6665FNIEMOP_DEF(iemOp_emms)
6666{
6667 IEMOP_MNEMONIC(emms, "emms");
6668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6669
6670 IEM_MC_BEGIN(0, 0);
6671 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6672 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6673 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6674 IEM_MC_FPU_FROM_MMX_MODE();
6675 IEM_MC_ADVANCE_RIP();
6676 IEM_MC_END();
6677 return VINF_SUCCESS;
6678}
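/*
 * Note (documentation only): emms is the bridge back from MMX to x87 - it
 * marks every x87 register as empty so subsequent FPU loads do not fault on
 * in-use registers; IEM_MC_FPU_FROM_MMX_MODE() models this.  In full
 * tag-word terms (the FXSAVE image stores an abridged form):
 *
 *     #include <stdint.h>
 *     static void EmmsTagWordSketch(uint16_t *pu16Ftw)
 *     {
 *         *pu16Ftw = 0xffff; // 11b = empty for all eight registers
 *     }
 */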
6679
6680/* Opcode 0x66 0x0f 0x77 - invalid */
6681/* Opcode 0xf3 0x0f 0x77 - invalid */
6682/* Opcode 0xf2 0x0f 0x77 - invalid */
6683
6684/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6685#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6686FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6687{
6688 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6689 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6690 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6691 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
6692
6693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6694 if (IEM_IS_MODRM_REG_MODE(bRm))
6695 {
6696 /*
6697 * Register, register.
6698 */
6699 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6700 if (enmEffOpSize == IEMMODE_64BIT)
6701 {
6702 IEM_MC_BEGIN(2, 0);
6703 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6704 IEM_MC_ARG(uint64_t, u64Enc, 1);
6705 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6706 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6707 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6708 IEM_MC_END();
6709 }
6710 else
6711 {
6712 IEM_MC_BEGIN(2, 0);
6713 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6714 IEM_MC_ARG(uint32_t, u32Enc, 1);
6715 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6716 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6717 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
6718 IEM_MC_END();
6719 }
6720 }
6721 else
6722 {
6723 /*
6724 * Memory, register.
6725 */
6726 if (enmEffOpSize == IEMMODE_64BIT)
6727 {
6728 IEM_MC_BEGIN(3, 0);
6729 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6730 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6731 IEM_MC_ARG(uint64_t, u64Enc, 2);
6732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6733 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6734 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6735 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6736 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6737 IEM_MC_END();
6738 }
6739 else
6740 {
6741 IEM_MC_BEGIN(3, 0);
6742 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6743 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6744 IEM_MC_ARG(uint32_t, u32Enc, 2);
6745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6746 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6747 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6748 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6749 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
6750 IEM_MC_END();
6751 }
6752 }
6753 return VINF_SUCCESS;
6754}
6755#else
6756FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
6757#endif
6758
6759/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
6760FNIEMOP_STUB(iemOp_AmdGrp17);
6761/* Opcode 0xf3 0x0f 0x78 - invalid */
6762/* Opcode 0xf2 0x0f 0x78 - invalid */
6763
6764/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
6765#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6766FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
6767{
6768 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
6769 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
6770 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
6771 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
6772
6773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6774 if (IEM_IS_MODRM_REG_MODE(bRm))
6775 {
6776 /*
6777 * Register, register.
6778 */
6779 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6780 if (enmEffOpSize == IEMMODE_64BIT)
6781 {
6782 IEM_MC_BEGIN(2, 0);
6783 IEM_MC_ARG(uint64_t, u64Val, 0);
6784 IEM_MC_ARG(uint64_t, u64Enc, 1);
6785 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6786 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6787 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
6788 IEM_MC_END();
6789 }
6790 else
6791 {
6792 IEM_MC_BEGIN(2, 0);
6793 IEM_MC_ARG(uint32_t, u32Val, 0);
6794 IEM_MC_ARG(uint32_t, u32Enc, 1);
6795 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6796 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6797 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
6798 IEM_MC_END();
6799 }
6800 }
6801 else
6802 {
6803 /*
6804 * Register, memory.
6805 */
6806 if (enmEffOpSize == IEMMODE_64BIT)
6807 {
6808 IEM_MC_BEGIN(3, 0);
6809 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6810 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6811 IEM_MC_ARG(uint64_t, u64Enc, 2);
6812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6813 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6814 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6815 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6816 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
6817 IEM_MC_END();
6818 }
6819 else
6820 {
6821 IEM_MC_BEGIN(3, 0);
6822 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6823 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6824 IEM_MC_ARG(uint32_t, u32Enc, 2);
6825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6826 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6827 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6828 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6829 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
6830 IEM_MC_END();
6831 }
6832 }
6833 return VINF_SUCCESS;
6834}
6835#else
6836FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
6837#endif
6838/* Opcode 0x66 0x0f 0x79 - invalid */
6839/* Opcode 0xf3 0x0f 0x79 - invalid */
6840/* Opcode 0xf2 0x0f 0x79 - invalid */
6841
6842/* Opcode 0x0f 0x7a - invalid */
6843/* Opcode 0x66 0x0f 0x7a - invalid */
6844/* Opcode 0xf3 0x0f 0x7a - invalid */
6845/* Opcode 0xf2 0x0f 0x7a - invalid */
6846
6847/* Opcode 0x0f 0x7b - invalid */
6848/* Opcode 0x66 0x0f 0x7b - invalid */
6849/* Opcode 0xf3 0x0f 0x7b - invalid */
6850/* Opcode 0xf2 0x0f 0x7b - invalid */
6851
6852/* Opcode 0x0f 0x7c - invalid */
6853
6854
6855/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
6856FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
6857{
6858 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6859 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
6860}
6861
6862
6863/* Opcode 0xf3 0x0f 0x7c - invalid */
6864
6865
6866/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
6867FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
6868{
6869 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6870 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
6871}
6872
6873
6874/* Opcode 0x0f 0x7d - invalid */
6875
6876
6877/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
6878FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
6879{
6880 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6881 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
6882}
6883
6884
6885/* Opcode 0xf3 0x0f 0x7d - invalid */
6886
6887
6888/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
6889FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
6890{
6891 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6892 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
6893}
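/*
 * Illustration (documentation only): the horizontal ops pair adjacent
 * elements; for haddps the result is { dst0+dst1, dst2+dst3, src0+src1,
 * src2+src3 }, and hsubps subtracts the second element of each pair
 * instead.  Sketch:
 *
 *     #include <string.h>
 *     static void HaddpsSketch(float afDst[4], float const afSrc[4])
 *     {
 *         float const afRes[4] = { afDst[0] + afDst[1], afDst[2] + afDst[3],
 *                                  afSrc[0] + afSrc[1], afSrc[2] + afSrc[3] };
 *         memcpy(afDst, afRes, sizeof(afRes));
 *     }
 */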
6894
6895
6896/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
6897FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
6898{
6899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6900 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6901 {
6902 /**
6903 * @opcode 0x7e
6904 * @opcodesub rex.w=1
6905 * @oppfx none
6906 * @opcpuid mmx
6907 * @opgroup og_mmx_datamove
6908 * @opxcpttype 5
6909 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6910 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6911 */
6912 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6913 if (IEM_IS_MODRM_REG_MODE(bRm))
6914 {
6915 /* greg64, MMX */
6916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6917 IEM_MC_BEGIN(0, 1);
6918 IEM_MC_LOCAL(uint64_t, u64Tmp);
6919
6920 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6921 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6922 IEM_MC_FPU_TO_MMX_MODE();
6923
6924 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6925 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
6926
6927 IEM_MC_ADVANCE_RIP();
6928 IEM_MC_END();
6929 }
6930 else
6931 {
6932 /* [mem64], MMX */
6933 IEM_MC_BEGIN(0, 2);
6934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6935 IEM_MC_LOCAL(uint64_t, u64Tmp);
6936
6937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6939 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6940 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6941 IEM_MC_FPU_TO_MMX_MODE();
6942
6943 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6944 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
6945
6946 IEM_MC_ADVANCE_RIP();
6947 IEM_MC_END();
6948 }
6949 }
6950 else
6951 {
6952 /**
6953 * @opdone
6954 * @opcode 0x7e
6955 * @opcodesub rex.w=0
6956 * @oppfx none
6957 * @opcpuid mmx
6958 * @opgroup og_mmx_datamove
6959 * @opxcpttype 5
6960 * @opfunction iemOp_movd_q_Ey_Pd
6961 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6962 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6963 */
6964 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6965 if (IEM_IS_MODRM_REG_MODE(bRm))
6966 {
6967 /* greg32, MMX */
6968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6969 IEM_MC_BEGIN(0, 1);
6970 IEM_MC_LOCAL(uint32_t, u32Tmp);
6971
6972 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6973 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6974 IEM_MC_FPU_TO_MMX_MODE();
6975
6976 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
6977 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
6978
6979 IEM_MC_ADVANCE_RIP();
6980 IEM_MC_END();
6981 }
6982 else
6983 {
6984 /* [mem32], MMX */
6985 IEM_MC_BEGIN(0, 2);
6986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6987 IEM_MC_LOCAL(uint32_t, u32Tmp);
6988
6989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6991 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6992 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6993 IEM_MC_FPU_TO_MMX_MODE();
6994
6995 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
6996 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
6997
6998 IEM_MC_ADVANCE_RIP();
6999 IEM_MC_END();
7000 }
7001 }
7002 return VINF_SUCCESS;
7004}
7005
7006
7007FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7008{
7009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7010 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7011 {
7012 /**
7013 * @opcode 0x7e
7014 * @opcodesub rex.w=1
7015 * @oppfx 0x66
7016 * @opcpuid sse2
7017 * @opgroup og_sse2_simdint_datamove
7018 * @opxcpttype 5
7019 * @optest 64-bit / op1=1 op2=2 -> op1=2
7020 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7021 */
7022 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7023 if (IEM_IS_MODRM_REG_MODE(bRm))
7024 {
7025 /* greg64, XMM */
7026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7027 IEM_MC_BEGIN(0, 1);
7028 IEM_MC_LOCAL(uint64_t, u64Tmp);
7029
7030 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7032
7033 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7034 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7035
7036 IEM_MC_ADVANCE_RIP();
7037 IEM_MC_END();
7038 }
7039 else
7040 {
7041 /* [mem64], XMM */
7042 IEM_MC_BEGIN(0, 2);
7043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7044 IEM_MC_LOCAL(uint64_t, u64Tmp);
7045
7046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7048 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7049 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7050
7051 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7052 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7053
7054 IEM_MC_ADVANCE_RIP();
7055 IEM_MC_END();
7056 }
7057 }
7058 else
7059 {
7060 /**
7061 * @opdone
7062 * @opcode 0x7e
7063 * @opcodesub rex.w=0
7064 * @oppfx 0x66
7065 * @opcpuid sse2
7066 * @opgroup og_sse2_simdint_datamove
7067 * @opxcpttype 5
7068 * @opfunction iemOp_movd_q_Ey_Vy
7069 * @optest op1=1 op2=2 -> op1=2
7070 * @optest op1=0 op2=-42 -> op1=-42
7071 */
7072 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7073 if (IEM_IS_MODRM_REG_MODE(bRm))
7074 {
7075 /* greg32, XMM */
7076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7077 IEM_MC_BEGIN(0, 1);
7078 IEM_MC_LOCAL(uint32_t, u32Tmp);
7079
7080 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7081 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7082
7083 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7084 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7085
7086 IEM_MC_ADVANCE_RIP();
7087 IEM_MC_END();
7088 }
7089 else
7090 {
7091 /* [mem32], XMM */
7092 IEM_MC_BEGIN(0, 2);
7093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7094 IEM_MC_LOCAL(uint32_t, u32Tmp);
7095
7096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7098 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7099 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7100
7101 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7102 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7103
7104 IEM_MC_ADVANCE_RIP();
7105 IEM_MC_END();
7106 }
7107 }
7108 return VINF_SUCCESS;
7110}
7111
7112/**
7113 * @opcode 0x7e
7114 * @oppfx 0xf3
7115 * @opcpuid sse2
7116 * @opgroup og_sse2_pcksclr_datamove
7117 * @opxcpttype none
7118 * @optest op1=1 op2=2 -> op1=2
7119 * @optest op1=0 op2=-42 -> op1=-42
7120 */
7121FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7122{
7123 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7125 if (IEM_IS_MODRM_REG_MODE(bRm))
7126 {
7127 /*
7128 * Register, register.
7129 */
7130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7131 IEM_MC_BEGIN(0, 2);
7132 IEM_MC_LOCAL(uint64_t, uSrc);
7133
7134 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7135 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7136
7137 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
7138 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7139
7140 IEM_MC_ADVANCE_RIP();
7141 IEM_MC_END();
7142 }
7143 else
7144 {
7145 /*
7146 * Memory, register.
7147 */
7148 IEM_MC_BEGIN(0, 2);
7149 IEM_MC_LOCAL(uint64_t, uSrc);
7150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7151
7152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7154 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7155 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7156
7157 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7158 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7159
7160 IEM_MC_ADVANCE_RIP();
7161 IEM_MC_END();
7162 }
7163 return VINF_SUCCESS;
7164}
7165
7166/* Opcode 0xf2 0x0f 0x7e - invalid */
7167
7168
7169/** Opcode 0x0f 0x7f - movq Qq, Pq */
7170FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7171{
7172 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7174 if (IEM_IS_MODRM_REG_MODE(bRm))
7175 {
7176 /*
7177 * Register, register.
7178 */
7179 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7180 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7182 IEM_MC_BEGIN(0, 1);
7183 IEM_MC_LOCAL(uint64_t, u64Tmp);
7184 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7185 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7186 IEM_MC_FPU_TO_MMX_MODE();
7187
7188 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7189 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7190
7191 IEM_MC_ADVANCE_RIP();
7192 IEM_MC_END();
7193 }
7194 else
7195 {
7196 /*
7197 * Memory, register.
7198 */
7199 IEM_MC_BEGIN(0, 2);
7200 IEM_MC_LOCAL(uint64_t, u64Tmp);
7201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7202
7203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7205 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7206 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7207 IEM_MC_FPU_TO_MMX_MODE();
7208
7209 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7210 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7211
7212 IEM_MC_ADVANCE_RIP();
7213 IEM_MC_END();
7214 }
7215 return VINF_SUCCESS;
7216}
7217
7218/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7219FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7220{
7221 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7223 if (IEM_IS_MODRM_REG_MODE(bRm))
7224 {
7225 /*
7226 * Register, register.
7227 */
7228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7229 IEM_MC_BEGIN(0, 0);
7230 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7231 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7232 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7233 IEM_GET_MODRM_REG(pVCpu, bRm));
7234 IEM_MC_ADVANCE_RIP();
7235 IEM_MC_END();
7236 }
7237 else
7238 {
7239 /*
7240 * Memory, register.
7241 */
7242 IEM_MC_BEGIN(0, 2);
7243 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7245
7246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7248 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7249 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7250
7251 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
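/* movdqa requires 16-byte alignment, so the aligned store variant is used
   here; it raises #GP(0) on a misaligned address. */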
7252 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7253
7254 IEM_MC_ADVANCE_RIP();
7255 IEM_MC_END();
7256 }
7257 return VINF_SUCCESS;
7258}
7259
7260/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7261FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7262{
7263 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7265 if (IEM_IS_MODRM_REG_MODE(bRm))
7266 {
7267 /*
7268 * Register, register.
7269 */
7270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7271 IEM_MC_BEGIN(0, 0);
7272 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7273 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7274 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7275 IEM_GET_MODRM_REG(pVCpu, bRm));
7276 IEM_MC_ADVANCE_RIP();
7277 IEM_MC_END();
7278 }
7279 else
7280 {
7281 /*
7282 * Memory, register.
7283 */
7284 IEM_MC_BEGIN(0, 2);
7285 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7287
7288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7290 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7291 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7292
7293 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
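/* Unlike movdqa above, movdqu has no alignment restriction, hence the
   plain (unaligned) store. */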
7294 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7295
7296 IEM_MC_ADVANCE_RIP();
7297 IEM_MC_END();
7298 }
7299 return VINF_SUCCESS;
7300}
7301
7302/* Opcode 0xf2 0x0f 0x7f - invalid */
7303
7304
7305
7306/** Opcode 0x0f 0x80. */
7307FNIEMOP_DEF(iemOp_jo_Jv)
7308{
7309 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7310 IEMOP_HLP_MIN_386();
7311 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
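/* Note: the default operand size is 64-bit in long mode, so the else branch
   below covers that case too; the displacement remains a sign-extended
   32-bit immediate. */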
7312 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7313 {
7314 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7316
7317 IEM_MC_BEGIN(0, 0);
7318 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7319 IEM_MC_REL_JMP_S16(i16Imm);
7320 } IEM_MC_ELSE() {
7321 IEM_MC_ADVANCE_RIP();
7322 } IEM_MC_ENDIF();
7323 IEM_MC_END();
7324 }
7325 else
7326 {
7327 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7329
7330 IEM_MC_BEGIN(0, 0);
7331 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7332 IEM_MC_REL_JMP_S32(i32Imm);
7333 } IEM_MC_ELSE() {
7334 IEM_MC_ADVANCE_RIP();
7335 } IEM_MC_ENDIF();
7336 IEM_MC_END();
7337 }
7338 return VINF_SUCCESS;
7339}
7340
7341
7342/** Opcode 0x0f 0x81. */
7343FNIEMOP_DEF(iemOp_jno_Jv)
7344{
7345 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7346 IEMOP_HLP_MIN_386();
7347 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7348 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7349 {
7350 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7352
7353 IEM_MC_BEGIN(0, 0);
7354 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7355 IEM_MC_ADVANCE_RIP();
7356 } IEM_MC_ELSE() {
7357 IEM_MC_REL_JMP_S16(i16Imm);
7358 } IEM_MC_ENDIF();
7359 IEM_MC_END();
7360 }
7361 else
7362 {
7363 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7365
7366 IEM_MC_BEGIN(0, 0);
7367 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7368 IEM_MC_ADVANCE_RIP();
7369 } IEM_MC_ELSE() {
7370 IEM_MC_REL_JMP_S32(i32Imm);
7371 } IEM_MC_ENDIF();
7372 IEM_MC_END();
7373 }
7374 return VINF_SUCCESS;
7375}
7376
7377
7378/** Opcode 0x0f 0x82. */
7379FNIEMOP_DEF(iemOp_jc_Jv)
7380{
7381 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7382 IEMOP_HLP_MIN_386();
7383 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7384 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7385 {
7386 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7388
7389 IEM_MC_BEGIN(0, 0);
7390 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7391 IEM_MC_REL_JMP_S16(i16Imm);
7392 } IEM_MC_ELSE() {
7393 IEM_MC_ADVANCE_RIP();
7394 } IEM_MC_ENDIF();
7395 IEM_MC_END();
7396 }
7397 else
7398 {
7399 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7401
7402 IEM_MC_BEGIN(0, 0);
7403 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7404 IEM_MC_REL_JMP_S32(i32Imm);
7405 } IEM_MC_ELSE() {
7406 IEM_MC_ADVANCE_RIP();
7407 } IEM_MC_ENDIF();
7408 IEM_MC_END();
7409 }
7410 return VINF_SUCCESS;
7411}
7412
7413
7414/** Opcode 0x0f 0x83. */
7415FNIEMOP_DEF(iemOp_jnc_Jv)
7416{
7417 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7418 IEMOP_HLP_MIN_386();
7419 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7420 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7421 {
7422 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7424
7425 IEM_MC_BEGIN(0, 0);
7426 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7427 IEM_MC_ADVANCE_RIP();
7428 } IEM_MC_ELSE() {
7429 IEM_MC_REL_JMP_S16(i16Imm);
7430 } IEM_MC_ENDIF();
7431 IEM_MC_END();
7432 }
7433 else
7434 {
7435 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7437
7438 IEM_MC_BEGIN(0, 0);
7439 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7440 IEM_MC_ADVANCE_RIP();
7441 } IEM_MC_ELSE() {
7442 IEM_MC_REL_JMP_S32(i32Imm);
7443 } IEM_MC_ENDIF();
7444 IEM_MC_END();
7445 }
7446 return VINF_SUCCESS;
7447}
7448
7449
7450/** Opcode 0x0f 0x84. */
7451FNIEMOP_DEF(iemOp_je_Jv)
7452{
7453 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7454 IEMOP_HLP_MIN_386();
7455 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7456 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7457 {
7458 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7460
7461 IEM_MC_BEGIN(0, 0);
7462 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7463 IEM_MC_REL_JMP_S16(i16Imm);
7464 } IEM_MC_ELSE() {
7465 IEM_MC_ADVANCE_RIP();
7466 } IEM_MC_ENDIF();
7467 IEM_MC_END();
7468 }
7469 else
7470 {
7471 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7473
7474 IEM_MC_BEGIN(0, 0);
7475 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7476 IEM_MC_REL_JMP_S32(i32Imm);
7477 } IEM_MC_ELSE() {
7478 IEM_MC_ADVANCE_RIP();
7479 } IEM_MC_ENDIF();
7480 IEM_MC_END();
7481 }
7482 return VINF_SUCCESS;
7483}
7484
7485
7486/** Opcode 0x0f 0x85. */
7487FNIEMOP_DEF(iemOp_jne_Jv)
7488{
7489 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7490 IEMOP_HLP_MIN_386();
7491 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7492 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7493 {
7494 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7496
7497 IEM_MC_BEGIN(0, 0);
7498 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7499 IEM_MC_ADVANCE_RIP();
7500 } IEM_MC_ELSE() {
7501 IEM_MC_REL_JMP_S16(i16Imm);
7502 } IEM_MC_ENDIF();
7503 IEM_MC_END();
7504 }
7505 else
7506 {
7507 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7509
7510 IEM_MC_BEGIN(0, 0);
7511 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7512 IEM_MC_ADVANCE_RIP();
7513 } IEM_MC_ELSE() {
7514 IEM_MC_REL_JMP_S32(i32Imm);
7515 } IEM_MC_ENDIF();
7516 IEM_MC_END();
7517 }
7518 return VINF_SUCCESS;
7519}
7520
7521
7522/** Opcode 0x0f 0x86. */
7523FNIEMOP_DEF(iemOp_jbe_Jv)
7524{
7525 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7526 IEMOP_HLP_MIN_386();
7527 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7528 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7529 {
7530 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7532
7533 IEM_MC_BEGIN(0, 0);
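/* jbe/jna: unsigned 'below or equal' - taken when CF=1 or ZF=1. */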
7534 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7535 IEM_MC_REL_JMP_S16(i16Imm);
7536 } IEM_MC_ELSE() {
7537 IEM_MC_ADVANCE_RIP();
7538 } IEM_MC_ENDIF();
7539 IEM_MC_END();
7540 }
7541 else
7542 {
7543 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7545
7546 IEM_MC_BEGIN(0, 0);
7547 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7548 IEM_MC_REL_JMP_S32(i32Imm);
7549 } IEM_MC_ELSE() {
7550 IEM_MC_ADVANCE_RIP();
7551 } IEM_MC_ENDIF();
7552 IEM_MC_END();
7553 }
7554 return VINF_SUCCESS;
7555}
7556
7557
7558/** Opcode 0x0f 0x87. */
7559FNIEMOP_DEF(iemOp_jnbe_Jv)
7560{
7561 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7562 IEMOP_HLP_MIN_386();
7563 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7564 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7565 {
7566 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7568
7569 IEM_MC_BEGIN(0, 0);
7570 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7571 IEM_MC_ADVANCE_RIP();
7572 } IEM_MC_ELSE() {
7573 IEM_MC_REL_JMP_S16(i16Imm);
7574 } IEM_MC_ENDIF();
7575 IEM_MC_END();
7576 }
7577 else
7578 {
7579 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7581
7582 IEM_MC_BEGIN(0, 0);
7583 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7584 IEM_MC_ADVANCE_RIP();
7585 } IEM_MC_ELSE() {
7586 IEM_MC_REL_JMP_S32(i32Imm);
7587 } IEM_MC_ENDIF();
7588 IEM_MC_END();
7589 }
7590 return VINF_SUCCESS;
7591}
7592
7593
7594/** Opcode 0x0f 0x88. */
7595FNIEMOP_DEF(iemOp_js_Jv)
7596{
7597 IEMOP_MNEMONIC(js_Jv, "js Jv");
7598 IEMOP_HLP_MIN_386();
7599 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7600 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7601 {
7602 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7604
7605 IEM_MC_BEGIN(0, 0);
7606 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7607 IEM_MC_REL_JMP_S16(i16Imm);
7608 } IEM_MC_ELSE() {
7609 IEM_MC_ADVANCE_RIP();
7610 } IEM_MC_ENDIF();
7611 IEM_MC_END();
7612 }
7613 else
7614 {
7615 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7617
7618 IEM_MC_BEGIN(0, 0);
7619 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7620 IEM_MC_REL_JMP_S32(i32Imm);
7621 } IEM_MC_ELSE() {
7622 IEM_MC_ADVANCE_RIP();
7623 } IEM_MC_ENDIF();
7624 IEM_MC_END();
7625 }
7626 return VINF_SUCCESS;
7627}
7628
7629
7630/** Opcode 0x0f 0x89. */
7631FNIEMOP_DEF(iemOp_jns_Jv)
7632{
7633 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7634 IEMOP_HLP_MIN_386();
7635 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7636 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7637 {
7638 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7640
7641 IEM_MC_BEGIN(0, 0);
7642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7643 IEM_MC_ADVANCE_RIP();
7644 } IEM_MC_ELSE() {
7645 IEM_MC_REL_JMP_S16(i16Imm);
7646 } IEM_MC_ENDIF();
7647 IEM_MC_END();
7648 }
7649 else
7650 {
7651 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7653
7654 IEM_MC_BEGIN(0, 0);
7655 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7656 IEM_MC_ADVANCE_RIP();
7657 } IEM_MC_ELSE() {
7658 IEM_MC_REL_JMP_S32(i32Imm);
7659 } IEM_MC_ENDIF();
7660 IEM_MC_END();
7661 }
7662 return VINF_SUCCESS;
7663}
7664
7665
7666/** Opcode 0x0f 0x8a. */
7667FNIEMOP_DEF(iemOp_jp_Jv)
7668{
7669 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7670 IEMOP_HLP_MIN_386();
7671 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7672 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7673 {
7674 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7676
7677 IEM_MC_BEGIN(0, 0);
7678 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7679 IEM_MC_REL_JMP_S16(i16Imm);
7680 } IEM_MC_ELSE() {
7681 IEM_MC_ADVANCE_RIP();
7682 } IEM_MC_ENDIF();
7683 IEM_MC_END();
7684 }
7685 else
7686 {
7687 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7689
7690 IEM_MC_BEGIN(0, 0);
7691 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7692 IEM_MC_REL_JMP_S32(i32Imm);
7693 } IEM_MC_ELSE() {
7694 IEM_MC_ADVANCE_RIP();
7695 } IEM_MC_ENDIF();
7696 IEM_MC_END();
7697 }
7698 return VINF_SUCCESS;
7699}
7700
7701
7702/** Opcode 0x0f 0x8b. */
7703FNIEMOP_DEF(iemOp_jnp_Jv)
7704{
7705 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7706 IEMOP_HLP_MIN_386();
7707 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7708 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7709 {
7710 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7712
7713 IEM_MC_BEGIN(0, 0);
7714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7715 IEM_MC_ADVANCE_RIP();
7716 } IEM_MC_ELSE() {
7717 IEM_MC_REL_JMP_S16(i16Imm);
7718 } IEM_MC_ENDIF();
7719 IEM_MC_END();
7720 }
7721 else
7722 {
7723 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7725
7726 IEM_MC_BEGIN(0, 0);
7727 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7728 IEM_MC_ADVANCE_RIP();
7729 } IEM_MC_ELSE() {
7730 IEM_MC_REL_JMP_S32(i32Imm);
7731 } IEM_MC_ENDIF();
7732 IEM_MC_END();
7733 }
7734 return VINF_SUCCESS;
7735}
7736
7737
7738/** Opcode 0x0f 0x8c. */
7739FNIEMOP_DEF(iemOp_jl_Jv)
7740{
7741 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7742 IEMOP_HLP_MIN_386();
7743 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7744 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7745 {
7746 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7748
7749 IEM_MC_BEGIN(0, 0);
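/* jl/jnge: signed 'less than' - taken when SF != OF. */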
7750 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7751 IEM_MC_REL_JMP_S16(i16Imm);
7752 } IEM_MC_ELSE() {
7753 IEM_MC_ADVANCE_RIP();
7754 } IEM_MC_ENDIF();
7755 IEM_MC_END();
7756 }
7757 else
7758 {
7759 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7761
7762 IEM_MC_BEGIN(0, 0);
7763 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7764 IEM_MC_REL_JMP_S32(i32Imm);
7765 } IEM_MC_ELSE() {
7766 IEM_MC_ADVANCE_RIP();
7767 } IEM_MC_ENDIF();
7768 IEM_MC_END();
7769 }
7770 return VINF_SUCCESS;
7771}
7772
7773
7774/** Opcode 0x0f 0x8d. */
7775FNIEMOP_DEF(iemOp_jnl_Jv)
7776{
7777 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
7778 IEMOP_HLP_MIN_386();
7779 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7780 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7781 {
7782 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7784
7785 IEM_MC_BEGIN(0, 0);
7786 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7787 IEM_MC_ADVANCE_RIP();
7788 } IEM_MC_ELSE() {
7789 IEM_MC_REL_JMP_S16(i16Imm);
7790 } IEM_MC_ENDIF();
7791 IEM_MC_END();
7792 }
7793 else
7794 {
7795 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7797
7798 IEM_MC_BEGIN(0, 0);
7799 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7800 IEM_MC_ADVANCE_RIP();
7801 } IEM_MC_ELSE() {
7802 IEM_MC_REL_JMP_S32(i32Imm);
7803 } IEM_MC_ENDIF();
7804 IEM_MC_END();
7805 }
7806 return VINF_SUCCESS;
7807}
7808
7809
7810/** Opcode 0x0f 0x8e. */
7811FNIEMOP_DEF(iemOp_jle_Jv)
7812{
7813 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
7814 IEMOP_HLP_MIN_386();
7815 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7816 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7817 {
7818 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7820
7821 IEM_MC_BEGIN(0, 0);
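/* jle/jng: signed 'less or equal' - taken when ZF=1 or SF != OF. */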
7822 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7823 IEM_MC_REL_JMP_S16(i16Imm);
7824 } IEM_MC_ELSE() {
7825 IEM_MC_ADVANCE_RIP();
7826 } IEM_MC_ENDIF();
7827 IEM_MC_END();
7828 }
7829 else
7830 {
7831 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7833
7834 IEM_MC_BEGIN(0, 0);
7835 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7836 IEM_MC_REL_JMP_S32(i32Imm);
7837 } IEM_MC_ELSE() {
7838 IEM_MC_ADVANCE_RIP();
7839 } IEM_MC_ENDIF();
7840 IEM_MC_END();
7841 }
7842 return VINF_SUCCESS;
7843}
7844
7845
7846/** Opcode 0x0f 0x8f. */
7847FNIEMOP_DEF(iemOp_jnle_Jv)
7848{
7849 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
7850 IEMOP_HLP_MIN_386();
7851 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7852 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7853 {
7854 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7856
7857 IEM_MC_BEGIN(0, 0);
7858 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7859 IEM_MC_ADVANCE_RIP();
7860 } IEM_MC_ELSE() {
7861 IEM_MC_REL_JMP_S16(i16Imm);
7862 } IEM_MC_ENDIF();
7863 IEM_MC_END();
7864 }
7865 else
7866 {
7867 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7869
7870 IEM_MC_BEGIN(0, 0);
7871 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7872 IEM_MC_ADVANCE_RIP();
7873 } IEM_MC_ELSE() {
7874 IEM_MC_REL_JMP_S32(i32Imm);
7875 } IEM_MC_ENDIF();
7876 IEM_MC_END();
7877 }
7878 return VINF_SUCCESS;
7879}
7880
7881
7882/** Opcode 0x0f 0x90. */
7883FNIEMOP_DEF(iemOp_seto_Eb)
7884{
7885 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
7886 IEMOP_HLP_MIN_386();
7887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7888
7889 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7890 * any way. AMD says it's "unused", whatever that means. We're
7891 * ignoring for now. */
7892 if (IEM_IS_MODRM_REG_MODE(bRm))
7893 {
7894 /* register target */
7895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7896 IEM_MC_BEGIN(0, 0);
7897 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7898 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7899 } IEM_MC_ELSE() {
7900 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7901 } IEM_MC_ENDIF();
7902 IEM_MC_ADVANCE_RIP();
7903 IEM_MC_END();
7904 }
7905 else
7906 {
7907 /* memory target */
7908 IEM_MC_BEGIN(0, 1);
7909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7912 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7913 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7914 } IEM_MC_ELSE() {
7915 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7916 } IEM_MC_ENDIF();
7917 IEM_MC_ADVANCE_RIP();
7918 IEM_MC_END();
7919 }
7920 return VINF_SUCCESS;
7921}
7922
7923
7924/** Opcode 0x0f 0x91. */
7925FNIEMOP_DEF(iemOp_setno_Eb)
7926{
7927 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
7928 IEMOP_HLP_MIN_386();
7929 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7930
7931 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7932 * any way. AMD says it's "unused", whatever that means. We're
7933 * ignoring for now. */
7934 if (IEM_IS_MODRM_REG_MODE(bRm))
7935 {
7936 /* register target */
7937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7938 IEM_MC_BEGIN(0, 0);
7939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7940 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7941 } IEM_MC_ELSE() {
7942 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7943 } IEM_MC_ENDIF();
7944 IEM_MC_ADVANCE_RIP();
7945 IEM_MC_END();
7946 }
7947 else
7948 {
7949 /* memory target */
7950 IEM_MC_BEGIN(0, 1);
7951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7954 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7955 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7956 } IEM_MC_ELSE() {
7957 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7958 } IEM_MC_ENDIF();
7959 IEM_MC_ADVANCE_RIP();
7960 IEM_MC_END();
7961 }
7962 return VINF_SUCCESS;
7963}
7964
7965
7966/** Opcode 0x0f 0x92. */
7967FNIEMOP_DEF(iemOp_setc_Eb)
7968{
7969 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
7970 IEMOP_HLP_MIN_386();
7971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7972
7973 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7974 * any way. AMD says it's "unused", whatever that means. We're
7975 * ignoring for now. */
7976 if (IEM_IS_MODRM_REG_MODE(bRm))
7977 {
7978 /* register target */
7979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7980 IEM_MC_BEGIN(0, 0);
7981 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7982 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7983 } IEM_MC_ELSE() {
7984 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7985 } IEM_MC_ENDIF();
7986 IEM_MC_ADVANCE_RIP();
7987 IEM_MC_END();
7988 }
7989 else
7990 {
7991 /* memory target */
7992 IEM_MC_BEGIN(0, 1);
7993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7996 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7997 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7998 } IEM_MC_ELSE() {
7999 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8000 } IEM_MC_ENDIF();
8001 IEM_MC_ADVANCE_RIP();
8002 IEM_MC_END();
8003 }
8004 return VINF_SUCCESS;
8005}
8006
8007
8008/** Opcode 0x0f 0x93. */
8009FNIEMOP_DEF(iemOp_setnc_Eb)
8010{
8011 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8012 IEMOP_HLP_MIN_386();
8013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8014
8015 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8016 * any way. AMD says it's "unused", whatever that means. We're
8017 * ignoring for now. */
8018 if (IEM_IS_MODRM_REG_MODE(bRm))
8019 {
8020 /* register target */
8021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8022 IEM_MC_BEGIN(0, 0);
8023 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8024 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8025 } IEM_MC_ELSE() {
8026 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8027 } IEM_MC_ENDIF();
8028 IEM_MC_ADVANCE_RIP();
8029 IEM_MC_END();
8030 }
8031 else
8032 {
8033 /* memory target */
8034 IEM_MC_BEGIN(0, 1);
8035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8038 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8039 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8040 } IEM_MC_ELSE() {
8041 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8042 } IEM_MC_ENDIF();
8043 IEM_MC_ADVANCE_RIP();
8044 IEM_MC_END();
8045 }
8046 return VINF_SUCCESS;
8047}
8048
8049
8050/** Opcode 0x0f 0x94. */
8051FNIEMOP_DEF(iemOp_sete_Eb)
8052{
8053 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8054 IEMOP_HLP_MIN_386();
8055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8056
8057 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8058 * any way. AMD says it's "unused", whatever that means. We're
8059 * ignoring for now. */
8060 if (IEM_IS_MODRM_REG_MODE(bRm))
8061 {
8062 /* register target */
8063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8064 IEM_MC_BEGIN(0, 0);
8065 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8066 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8067 } IEM_MC_ELSE() {
8068 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8069 } IEM_MC_ENDIF();
8070 IEM_MC_ADVANCE_RIP();
8071 IEM_MC_END();
8072 }
8073 else
8074 {
8075 /* memory target */
8076 IEM_MC_BEGIN(0, 1);
8077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8080 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8081 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8082 } IEM_MC_ELSE() {
8083 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8084 } IEM_MC_ENDIF();
8085 IEM_MC_ADVANCE_RIP();
8086 IEM_MC_END();
8087 }
8088 return VINF_SUCCESS;
8089}
8090
8091
8092/** Opcode 0x0f 0x95. */
8093FNIEMOP_DEF(iemOp_setne_Eb)
8094{
8095 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8096 IEMOP_HLP_MIN_386();
8097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8098
8099 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8100 * any way. AMD says it's "unused", whatever that means. We're
8101 * ignoring for now. */
8102 if (IEM_IS_MODRM_REG_MODE(bRm))
8103 {
8104 /* register target */
8105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8106 IEM_MC_BEGIN(0, 0);
8107 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8108 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8109 } IEM_MC_ELSE() {
8110 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8111 } IEM_MC_ENDIF();
8112 IEM_MC_ADVANCE_RIP();
8113 IEM_MC_END();
8114 }
8115 else
8116 {
8117 /* memory target */
8118 IEM_MC_BEGIN(0, 1);
8119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8122 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8124 } IEM_MC_ELSE() {
8125 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8126 } IEM_MC_ENDIF();
8127 IEM_MC_ADVANCE_RIP();
8128 IEM_MC_END();
8129 }
8130 return VINF_SUCCESS;
8131}
8132
8133
8134/** Opcode 0x0f 0x96. */
8135FNIEMOP_DEF(iemOp_setbe_Eb)
8136{
8137 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8138 IEMOP_HLP_MIN_386();
8139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8140
8141 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8142 * any way. AMD says it's "unused", whatever that means. We're
8143 * ignoring for now. */
8144 if (IEM_IS_MODRM_REG_MODE(bRm))
8145 {
8146 /* register target */
8147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8148 IEM_MC_BEGIN(0, 0);
8149 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8150 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8151 } IEM_MC_ELSE() {
8152 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8153 } IEM_MC_ENDIF();
8154 IEM_MC_ADVANCE_RIP();
8155 IEM_MC_END();
8156 }
8157 else
8158 {
8159 /* memory target */
8160 IEM_MC_BEGIN(0, 1);
8161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8164 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8166 } IEM_MC_ELSE() {
8167 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8168 } IEM_MC_ENDIF();
8169 IEM_MC_ADVANCE_RIP();
8170 IEM_MC_END();
8171 }
8172 return VINF_SUCCESS;
8173}
8174
8175
8176/** Opcode 0x0f 0x97. */
8177FNIEMOP_DEF(iemOp_setnbe_Eb)
8178{
8179 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8180 IEMOP_HLP_MIN_386();
8181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8182
8183 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8184 * any way. AMD says it's "unused", whatever that means. We're
8185 * ignoring for now. */
8186 if (IEM_IS_MODRM_REG_MODE(bRm))
8187 {
8188 /* register target */
8189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8190 IEM_MC_BEGIN(0, 0);
8191 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8192 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8193 } IEM_MC_ELSE() {
8194 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8195 } IEM_MC_ENDIF();
8196 IEM_MC_ADVANCE_RIP();
8197 IEM_MC_END();
8198 }
8199 else
8200 {
8201 /* memory target */
8202 IEM_MC_BEGIN(0, 1);
8203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8206 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8208 } IEM_MC_ELSE() {
8209 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8210 } IEM_MC_ENDIF();
8211 IEM_MC_ADVANCE_RIP();
8212 IEM_MC_END();
8213 }
8214 return VINF_SUCCESS;
8215}
8216
8217
8218/** Opcode 0x0f 0x98. */
8219FNIEMOP_DEF(iemOp_sets_Eb)
8220{
8221 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8222 IEMOP_HLP_MIN_386();
8223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8224
8225 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8226 * any way. AMD says it's "unused", whatever that means. We're
8227 * ignoring for now. */
8228 if (IEM_IS_MODRM_REG_MODE(bRm))
8229 {
8230 /* register target */
8231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8232 IEM_MC_BEGIN(0, 0);
8233 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8234 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8235 } IEM_MC_ELSE() {
8236 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8237 } IEM_MC_ENDIF();
8238 IEM_MC_ADVANCE_RIP();
8239 IEM_MC_END();
8240 }
8241 else
8242 {
8243 /* memory target */
8244 IEM_MC_BEGIN(0, 1);
8245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8248 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8250 } IEM_MC_ELSE() {
8251 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8252 } IEM_MC_ENDIF();
8253 IEM_MC_ADVANCE_RIP();
8254 IEM_MC_END();
8255 }
8256 return VINF_SUCCESS;
8257}
8258
8259
8260/** Opcode 0x0f 0x99. */
8261FNIEMOP_DEF(iemOp_setns_Eb)
8262{
8263 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8264 IEMOP_HLP_MIN_386();
8265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8266
8267 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8268 * any way. AMD says it's "unused", whatever that means. We're
8269 * ignoring for now. */
8270 if (IEM_IS_MODRM_REG_MODE(bRm))
8271 {
8272 /* register target */
8273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8274 IEM_MC_BEGIN(0, 0);
8275 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8276 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8277 } IEM_MC_ELSE() {
8278 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8279 } IEM_MC_ENDIF();
8280 IEM_MC_ADVANCE_RIP();
8281 IEM_MC_END();
8282 }
8283 else
8284 {
8285 /* memory target */
8286 IEM_MC_BEGIN(0, 1);
8287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8290 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8292 } IEM_MC_ELSE() {
8293 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8294 } IEM_MC_ENDIF();
8295 IEM_MC_ADVANCE_RIP();
8296 IEM_MC_END();
8297 }
8298 return VINF_SUCCESS;
8299}
8300
8301
8302/** Opcode 0x0f 0x9a. */
8303FNIEMOP_DEF(iemOp_setp_Eb)
8304{
8305 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8306 IEMOP_HLP_MIN_386();
8307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8308
8309 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8310 * any way. AMD says it's "unused", whatever that means. We're
8311 * ignoring for now. */
8312 if (IEM_IS_MODRM_REG_MODE(bRm))
8313 {
8314 /* register target */
8315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8316 IEM_MC_BEGIN(0, 0);
8317 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8318 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8319 } IEM_MC_ELSE() {
8320 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8321 } IEM_MC_ENDIF();
8322 IEM_MC_ADVANCE_RIP();
8323 IEM_MC_END();
8324 }
8325 else
8326 {
8327 /* memory target */
8328 IEM_MC_BEGIN(0, 1);
8329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8332 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8334 } IEM_MC_ELSE() {
8335 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8336 } IEM_MC_ENDIF();
8337 IEM_MC_ADVANCE_RIP();
8338 IEM_MC_END();
8339 }
8340 return VINF_SUCCESS;
8341}
8342
8343
8344/** Opcode 0x0f 0x9b. */
8345FNIEMOP_DEF(iemOp_setnp_Eb)
8346{
8347 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8348 IEMOP_HLP_MIN_386();
8349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8350
8351 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8352 * any way. AMD says it's "unused", whatever that means. We're
8353 * ignoring for now. */
8354 if (IEM_IS_MODRM_REG_MODE(bRm))
8355 {
8356 /* register target */
8357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8358 IEM_MC_BEGIN(0, 0);
8359 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8360 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8361 } IEM_MC_ELSE() {
8362 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8363 } IEM_MC_ENDIF();
8364 IEM_MC_ADVANCE_RIP();
8365 IEM_MC_END();
8366 }
8367 else
8368 {
8369 /* memory target */
8370 IEM_MC_BEGIN(0, 1);
8371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8374 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8376 } IEM_MC_ELSE() {
8377 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8378 } IEM_MC_ENDIF();
8379 IEM_MC_ADVANCE_RIP();
8380 IEM_MC_END();
8381 }
8382 return VINF_SUCCESS;
8383}
8384
8385
8386/** Opcode 0x0f 0x9c. */
8387FNIEMOP_DEF(iemOp_setl_Eb)
8388{
8389 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8390 IEMOP_HLP_MIN_386();
8391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8392
8393 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8394 * any way. AMD says it's "unused", whatever that means. We're
8395 * ignoring for now. */
8396 if (IEM_IS_MODRM_REG_MODE(bRm))
8397 {
8398 /* register target */
8399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8400 IEM_MC_BEGIN(0, 0);
8401 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8402 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8403 } IEM_MC_ELSE() {
8404 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8405 } IEM_MC_ENDIF();
8406 IEM_MC_ADVANCE_RIP();
8407 IEM_MC_END();
8408 }
8409 else
8410 {
8411 /* memory target */
8412 IEM_MC_BEGIN(0, 1);
8413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8416 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8418 } IEM_MC_ELSE() {
8419 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8420 } IEM_MC_ENDIF();
8421 IEM_MC_ADVANCE_RIP();
8422 IEM_MC_END();
8423 }
8424 return VINF_SUCCESS;
8425}
8426
8427
8428/** Opcode 0x0f 0x9d. */
8429FNIEMOP_DEF(iemOp_setnl_Eb)
8430{
8431 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8432 IEMOP_HLP_MIN_386();
8433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8434
8435 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8436 * any way. AMD says it's "unused", whatever that means. We're
8437 * ignoring for now. */
8438 if (IEM_IS_MODRM_REG_MODE(bRm))
8439 {
8440 /* register target */
8441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8442 IEM_MC_BEGIN(0, 0);
8443 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8444 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8445 } IEM_MC_ELSE() {
8446 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8447 } IEM_MC_ENDIF();
8448 IEM_MC_ADVANCE_RIP();
8449 IEM_MC_END();
8450 }
8451 else
8452 {
8453 /* memory target */
8454 IEM_MC_BEGIN(0, 1);
8455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8458 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8460 } IEM_MC_ELSE() {
8461 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8462 } IEM_MC_ENDIF();
8463 IEM_MC_ADVANCE_RIP();
8464 IEM_MC_END();
8465 }
8466 return VINF_SUCCESS;
8467}
8468
8469
8470/** Opcode 0x0f 0x9e. */
8471FNIEMOP_DEF(iemOp_setle_Eb)
8472{
8473 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8474 IEMOP_HLP_MIN_386();
8475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8476
8477 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8478 * any way. AMD says it's "unused", whatever that means. We're
8479 * ignoring for now. */
8480 if (IEM_IS_MODRM_REG_MODE(bRm))
8481 {
8482 /* register target */
8483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8484 IEM_MC_BEGIN(0, 0);
8485 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8486 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8487 } IEM_MC_ELSE() {
8488 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8489 } IEM_MC_ENDIF();
8490 IEM_MC_ADVANCE_RIP();
8491 IEM_MC_END();
8492 }
8493 else
8494 {
8495 /* memory target */
8496 IEM_MC_BEGIN(0, 1);
8497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8500 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8502 } IEM_MC_ELSE() {
8503 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8504 } IEM_MC_ENDIF();
8505 IEM_MC_ADVANCE_RIP();
8506 IEM_MC_END();
8507 }
8508 return VINF_SUCCESS;
8509}
8510
8511
8512/** Opcode 0x0f 0x9f. */
8513FNIEMOP_DEF(iemOp_setnle_Eb)
8514{
8515 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8516 IEMOP_HLP_MIN_386();
8517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8518
8519 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8520 * any way. AMD says it's "unused", whatever that means. We're
8521 * ignoring for now. */
8522 if (IEM_IS_MODRM_REG_MODE(bRm))
8523 {
8524 /* register target */
8525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8526 IEM_MC_BEGIN(0, 0);
8527 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8528 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8529 } IEM_MC_ELSE() {
8530 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8531 } IEM_MC_ENDIF();
8532 IEM_MC_ADVANCE_RIP();
8533 IEM_MC_END();
8534 }
8535 else
8536 {
8537 /* memory target */
8538 IEM_MC_BEGIN(0, 1);
8539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8542 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8544 } IEM_MC_ELSE() {
8545 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8546 } IEM_MC_ENDIF();
8547 IEM_MC_ADVANCE_RIP();
8548 IEM_MC_END();
8549 }
8550 return VINF_SUCCESS;
8551}
8552
8553
8554/**
8555 * Common 'push segment-register' helper.
8556 */
8557FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8558{
8559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8560 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS/GS pushes exist in 64-bit mode. */
8561 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8562
8563 switch (pVCpu->iem.s.enmEffOpSize)
8564 {
8565 case IEMMODE_16BIT:
8566 IEM_MC_BEGIN(0, 1);
8567 IEM_MC_LOCAL(uint16_t, u16Value);
8568 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8569 IEM_MC_PUSH_U16(u16Value);
8570 IEM_MC_ADVANCE_RIP();
8571 IEM_MC_END();
8572 break;
8573
8574 case IEMMODE_32BIT:
8575 IEM_MC_BEGIN(0, 1);
8576 IEM_MC_LOCAL(uint32_t, u32Value);
8577 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
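/* A dedicated SREG push is used for the 32-bit case, presumably because
   some CPUs write only the 16-bit selector and leave the high half of the
   32-bit stack slot untouched. */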
8578 IEM_MC_PUSH_U32_SREG(u32Value);
8579 IEM_MC_ADVANCE_RIP();
8580 IEM_MC_END();
8581 break;
8582
8583 case IEMMODE_64BIT:
8584 IEM_MC_BEGIN(0, 1);
8585 IEM_MC_LOCAL(uint64_t, u64Value);
8586 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8587 IEM_MC_PUSH_U64(u64Value);
8588 IEM_MC_ADVANCE_RIP();
8589 IEM_MC_END();
8590 break;
8591 }
8592
8593 return VINF_SUCCESS;
8594}
8595
8596
8597/** Opcode 0x0f 0xa0. */
8598FNIEMOP_DEF(iemOp_push_fs)
8599{
8600 IEMOP_MNEMONIC(push_fs, "push fs");
8601 IEMOP_HLP_MIN_386();
8602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8603 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8604}
8605
8606
8607/** Opcode 0x0f 0xa1. */
8608FNIEMOP_DEF(iemOp_pop_fs)
8609{
8610 IEMOP_MNEMONIC(pop_fs, "pop fs");
8611 IEMOP_HLP_MIN_386();
8612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8613 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8614}
8615
8616
8617/** Opcode 0x0f 0xa2. */
8618FNIEMOP_DEF(iemOp_cpuid)
8619{
8620 IEMOP_MNEMONIC(cpuid, "cpuid");
8621 IEMOP_HLP_MIN_486(); /* not all 486es. */
8622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8623 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
8624}
8625
8626
8627/**
8628 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8629 * iemOp_bts_Ev_Gv.
8630 */
8631FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
8632{
8633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8634 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
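/* BT/BTS/BTR/BTC only define CF (it receives the tested bit); OF, SF, ZF,
   AF and PF are architecturally undefined afterwards. */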
8635
8636 if (IEM_IS_MODRM_REG_MODE(bRm))
8637 {
8638 /* register destination. */
8639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8640 switch (pVCpu->iem.s.enmEffOpSize)
8641 {
8642 case IEMMODE_16BIT:
8643 IEM_MC_BEGIN(3, 0);
8644 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8645 IEM_MC_ARG(uint16_t, u16Src, 1);
8646 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8647
8648 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8649 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
8650 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8651 IEM_MC_REF_EFLAGS(pEFlags);
8652 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8653
8654 IEM_MC_ADVANCE_RIP();
8655 IEM_MC_END();
8656 return VINF_SUCCESS;
8657
8658 case IEMMODE_32BIT:
8659 IEM_MC_BEGIN(3, 0);
8660 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8661 IEM_MC_ARG(uint32_t, u32Src, 1);
8662 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8663
8664 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8665 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
8666 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8667 IEM_MC_REF_EFLAGS(pEFlags);
8668 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8669
8670 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8671 IEM_MC_ADVANCE_RIP();
8672 IEM_MC_END();
8673 return VINF_SUCCESS;
8674
8675 case IEMMODE_64BIT:
8676 IEM_MC_BEGIN(3, 0);
8677 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8678 IEM_MC_ARG(uint64_t, u64Src, 1);
8679 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8680
8681 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8682 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
8683 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8684 IEM_MC_REF_EFLAGS(pEFlags);
8685 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8686
8687 IEM_MC_ADVANCE_RIP();
8688 IEM_MC_END();
8689 return VINF_SUCCESS;
8690
8691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8692 }
8693 }
8694 else
8695 {
8696 /* memory destination. */
8697
8698 uint32_t fAccess;
8699 if (pImpl->pfnLockedU16)
8700 fAccess = IEM_ACCESS_DATA_RW;
8701 else /* BT */
8702 fAccess = IEM_ACCESS_DATA_R;
8703
8704 /** @todo test negative bit offsets! */
8705 switch (pVCpu->iem.s.enmEffOpSize)
8706 {
8707 case IEMMODE_16BIT:
8708 IEM_MC_BEGIN(3, 2);
8709 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8710 IEM_MC_ARG(uint16_t, u16Src, 1);
8711 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8713 IEM_MC_LOCAL(int16_t, i16AddrAdj);
8714
8715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8716 if (pImpl->pfnLockedU16)
8717 IEMOP_HLP_DONE_DECODING();
8718 else
8719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8720 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
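/* The register form of BTx takes a signed bit offset relative to the
   effective address: the adjustment below adds (offset >> 4) words to the
   address (arithmetic shift, then scaled to bytes), while the low four
   bits select the bit within the 16-bit word. The 32-bit and 64-bit cases
   below do the same at dword and qword granularity. */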
8721 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
8722 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
8723 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
8724 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
8725 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
8726 IEM_MC_FETCH_EFLAGS(EFlags);
8727
8728 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8729 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8730 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8731 else
8732 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8733 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8734
8735 IEM_MC_COMMIT_EFLAGS(EFlags);
8736 IEM_MC_ADVANCE_RIP();
8737 IEM_MC_END();
8738 return VINF_SUCCESS;
8739
8740 case IEMMODE_32BIT:
8741 IEM_MC_BEGIN(3, 2);
8742 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8743 IEM_MC_ARG(uint32_t, u32Src, 1);
8744 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8746 IEM_MC_LOCAL(int32_t, i32AddrAdj);
8747
8748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8749 if (pImpl->pfnLockedU16)
8750 IEMOP_HLP_DONE_DECODING();
8751 else
8752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8753 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8754 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
8755 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
8756 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
8757 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
8758 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
8759 IEM_MC_FETCH_EFLAGS(EFlags);
8760
8761 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8762 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8763 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8764 else
8765 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8766 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8767
8768 IEM_MC_COMMIT_EFLAGS(EFlags);
8769 IEM_MC_ADVANCE_RIP();
8770 IEM_MC_END();
8771 return VINF_SUCCESS;
8772
8773 case IEMMODE_64BIT:
8774 IEM_MC_BEGIN(3, 2);
8775 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8776 IEM_MC_ARG(uint64_t, u64Src, 1);
8777 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8779 IEM_MC_LOCAL(int64_t, i64AddrAdj);
8780
8781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8782 if (pImpl->pfnLockedU16)
8783 IEMOP_HLP_DONE_DECODING();
8784 else
8785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8786 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8787 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
8788 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
8789 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
8790 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
8791 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
8792 IEM_MC_FETCH_EFLAGS(EFlags);
8793
8794 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8795 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8796 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8797 else
8798 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8799 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8800
8801 IEM_MC_COMMIT_EFLAGS(EFlags);
8802 IEM_MC_ADVANCE_RIP();
8803 IEM_MC_END();
8804 return VINF_SUCCESS;
8805
8806 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8807 }
8808 }
8809}
8810
8811
8812/** Opcode 0x0f 0xa3. */
8813FNIEMOP_DEF(iemOp_bt_Ev_Gv)
8814{
8815 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
8816 IEMOP_HLP_MIN_386();
8817 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
8818}
8819
8820
8821/**
8822 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
8823 */
8824FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
8825{
8826 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8827 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
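/* For shld/shrd AF is always undefined and OF is only defined for 1-bit
   shifts. */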
8828
8829 if (IEM_IS_MODRM_REG_MODE(bRm))
8830 {
8831 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8833
8834 switch (pVCpu->iem.s.enmEffOpSize)
8835 {
8836 case IEMMODE_16BIT:
8837 IEM_MC_BEGIN(4, 0);
8838 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8839 IEM_MC_ARG(uint16_t, u16Src, 1);
8840 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8841 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8842
8843 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8844 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8845 IEM_MC_REF_EFLAGS(pEFlags);
8846 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8847
8848 IEM_MC_ADVANCE_RIP();
8849 IEM_MC_END();
8850 return VINF_SUCCESS;
8851
8852 case IEMMODE_32BIT:
8853 IEM_MC_BEGIN(4, 0);
8854 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8855 IEM_MC_ARG(uint32_t, u32Src, 1);
8856 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8857 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8858
8859 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8860 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8861 IEM_MC_REF_EFLAGS(pEFlags);
8862 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8863
8864 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8865 IEM_MC_ADVANCE_RIP();
8866 IEM_MC_END();
8867 return VINF_SUCCESS;
8868
8869 case IEMMODE_64BIT:
8870 IEM_MC_BEGIN(4, 0);
8871 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8872 IEM_MC_ARG(uint64_t, u64Src, 1);
8873 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8874 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8875
8876 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8877 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8878 IEM_MC_REF_EFLAGS(pEFlags);
8879 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
8880
8881 IEM_MC_ADVANCE_RIP();
8882 IEM_MC_END();
8883 return VINF_SUCCESS;
8884
8885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8886 }
8887 }
8888 else
8889 {
8890 switch (pVCpu->iem.s.enmEffOpSize)
8891 {
8892 case IEMMODE_16BIT:
8893 IEM_MC_BEGIN(4, 2);
8894 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8895 IEM_MC_ARG(uint16_t, u16Src, 1);
8896 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8897 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8899
8900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8901 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8902 IEM_MC_ASSIGN(cShiftArg, cShift);
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8904 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8905 IEM_MC_FETCH_EFLAGS(EFlags);
8906 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8907 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8908
8909 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8910 IEM_MC_COMMIT_EFLAGS(EFlags);
8911 IEM_MC_ADVANCE_RIP();
8912 IEM_MC_END();
8913 return VINF_SUCCESS;
8914
8915 case IEMMODE_32BIT:
8916 IEM_MC_BEGIN(4, 2);
8917 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8918 IEM_MC_ARG(uint32_t, u32Src, 1);
8919 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8920 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8922
8923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8924 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8925 IEM_MC_ASSIGN(cShiftArg, cShift);
8926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8927 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8928 IEM_MC_FETCH_EFLAGS(EFlags);
8929 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8930 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8931
8932 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8933 IEM_MC_COMMIT_EFLAGS(EFlags);
8934 IEM_MC_ADVANCE_RIP();
8935 IEM_MC_END();
8936 return VINF_SUCCESS;
8937
8938 case IEMMODE_64BIT:
8939 IEM_MC_BEGIN(4, 2);
8940 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8941 IEM_MC_ARG(uint64_t, u64Src, 1);
8942 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8943 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8945
8946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8947 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8948 IEM_MC_ASSIGN(cShiftArg, cShift);
8949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8950 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8951 IEM_MC_FETCH_EFLAGS(EFlags);
8952 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8953 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
8954
8955 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8956 IEM_MC_COMMIT_EFLAGS(EFlags);
8957 IEM_MC_ADVANCE_RIP();
8958 IEM_MC_END();
8959 return VINF_SUCCESS;
8960
8961 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8962 }
8963 }
8964}
8965
8966
8967/**
8968 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
8969 */
8970FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
8971{
8972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8973 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
8974
8975 if (IEM_IS_MODRM_REG_MODE(bRm))
8976 {
8977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8978
8979 switch (pVCpu->iem.s.enmEffOpSize)
8980 {
8981 case IEMMODE_16BIT:
8982 IEM_MC_BEGIN(4, 0);
8983 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8984 IEM_MC_ARG(uint16_t, u16Src, 1);
8985 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8986 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8987
8988 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8989 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8990 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8991 IEM_MC_REF_EFLAGS(pEFlags);
8992 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8993
8994 IEM_MC_ADVANCE_RIP();
8995 IEM_MC_END();
8996 return VINF_SUCCESS;
8997
8998 case IEMMODE_32BIT:
8999 IEM_MC_BEGIN(4, 0);
9000 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9001 IEM_MC_ARG(uint32_t, u32Src, 1);
9002 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9003 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9004
9005 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9006 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9007 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9008 IEM_MC_REF_EFLAGS(pEFlags);
9009 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9010
9011 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9012 IEM_MC_ADVANCE_RIP();
9013 IEM_MC_END();
9014 return VINF_SUCCESS;
9015
9016 case IEMMODE_64BIT:
9017 IEM_MC_BEGIN(4, 0);
9018 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9019 IEM_MC_ARG(uint64_t, u64Src, 1);
9020 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9021 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9022
9023 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9024 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9025 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9026 IEM_MC_REF_EFLAGS(pEFlags);
9027 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9028
9029 IEM_MC_ADVANCE_RIP();
9030 IEM_MC_END();
9031 return VINF_SUCCESS;
9032
9033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9034 }
9035 }
9036 else
9037 {
9038 switch (pVCpu->iem.s.enmEffOpSize)
9039 {
9040 case IEMMODE_16BIT:
9041 IEM_MC_BEGIN(4, 2);
9042 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9043 IEM_MC_ARG(uint16_t, u16Src, 1);
9044 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9045 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9047
9048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9050 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9051 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9052 IEM_MC_FETCH_EFLAGS(EFlags);
9053 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9054 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9055
9056 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9057 IEM_MC_COMMIT_EFLAGS(EFlags);
9058 IEM_MC_ADVANCE_RIP();
9059 IEM_MC_END();
9060 return VINF_SUCCESS;
9061
9062 case IEMMODE_32BIT:
9063 IEM_MC_BEGIN(4, 2);
9064 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9065 IEM_MC_ARG(uint32_t, u32Src, 1);
9066 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9067 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9069
9070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9072 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9073 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9074 IEM_MC_FETCH_EFLAGS(EFlags);
9075 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9076 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9077
9078 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9079 IEM_MC_COMMIT_EFLAGS(EFlags);
9080 IEM_MC_ADVANCE_RIP();
9081 IEM_MC_END();
9082 return VINF_SUCCESS;
9083
9084 case IEMMODE_64BIT:
9085 IEM_MC_BEGIN(4, 2);
9086 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9087 IEM_MC_ARG(uint64_t, u64Src, 1);
9088 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9089 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9091
9092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9094 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9095 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9096 IEM_MC_FETCH_EFLAGS(EFlags);
9097 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9098 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9099
9100 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9101 IEM_MC_COMMIT_EFLAGS(EFlags);
9102 IEM_MC_ADVANCE_RIP();
9103 IEM_MC_END();
9104 return VINF_SUCCESS;
9105
9106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9107 }
9108 }
9109}
9110
9111
9112
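/*
 * For reference: the iemOpCommonShldShrd_CL worker above only marshals
 * (pDst, uSrc, cShift, pEFlags); the actual double-precision shift is done
 * by the pfnNormalUxx workers (IEMAllAImpl*).  A plain-C sketch of the
 * 32-bit SHLD core those workers implement might look like this
 * (hypothetical helper, flag handling heavily simplified):
 */
#if 0 /* illustrative sketch only, not part of the build */
static void iemExampleShldU32(uint32_t *puDst, uint32_t uSrc, uint8_t cShift, uint32_t *pfEFlags)
{
    cShift &= 31;                                   /* the CPU masks the count modulo 32 */
    if (cShift)
    {
        uint64_t const uPair = ((uint64_t)*puDst << 32) | uSrc;     /* dst:src pair */
        *pfEFlags = (*pfEFlags & ~(uint32_t)X86_EFL_CF)
                  | ((*puDst >> (32 - cShift)) & X86_EFL_CF);       /* last bit shifted out */
        *puDst    = (uint32_t)(uPair >> (32 - cShift));
        /* SF/ZF/PF and the single-shift OF rule are omitted for brevity. */
    }
}
#endif
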
9113/** Opcode 0x0f 0xa4. */
9114FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9115{
9116 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9117 IEMOP_HLP_MIN_386();
9118 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9119}
9120
9121
9122/** Opcode 0x0f 0xa5. */
9123FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9124{
9125 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9126 IEMOP_HLP_MIN_386();
9127 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9128}
9129
9130
9131/** Opcode 0x0f 0xa8. */
9132FNIEMOP_DEF(iemOp_push_gs)
9133{
9134 IEMOP_MNEMONIC(push_gs, "push gs");
9135 IEMOP_HLP_MIN_386();
9136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9137 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9138}
9139
9140
9141/** Opcode 0x0f 0xa9. */
9142FNIEMOP_DEF(iemOp_pop_gs)
9143{
9144 IEMOP_MNEMONIC(pop_gs, "pop gs");
9145 IEMOP_HLP_MIN_386();
9146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9147 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9148}
9149
9150
9151/** Opcode 0x0f 0xaa. */
9152FNIEMOP_DEF(iemOp_rsm)
9153{
9154 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9155 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9157 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9158}
9159
9160
9161
9162/** Opcode 0x0f 0xab. */
9163FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9164{
9165 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9166 IEMOP_HLP_MIN_386();
9167 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
9168}
9169
9170
9171/** Opcode 0x0f 0xac. */
9172FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9173{
9174 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9175 IEMOP_HLP_MIN_386();
9176 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9177}
9178
9179
9180/** Opcode 0x0f 0xad. */
9181FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9182{
9183 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9184 IEMOP_HLP_MIN_386();
9185 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9186}
9187
9188
9189/** Opcode 0x0f 0xae mem/0. */
9190FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9191{
9192 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9193 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9194 return IEMOP_RAISE_INVALID_OPCODE();
9195
9196 IEM_MC_BEGIN(3, 1);
9197 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9198 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9199 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9202 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9203 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9204 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9205 IEM_MC_END();
9206 return VINF_SUCCESS;
9207}
9208
9209
9210/** Opcode 0x0f 0xae mem/1. */
9211FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9212{
9213 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9214 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9215 return IEMOP_RAISE_INVALID_OPCODE();
9216
9217 IEM_MC_BEGIN(3, 1);
9218 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9219 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9220 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9223 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9224 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9225 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9226 IEM_MC_END();
9227 return VINF_SUCCESS;
9228}
9229
9230
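/*
 * Both FXSAVE and FXRSTOR above operate on a 512-byte, 16-byte aligned
 * memory image (X86FXSTATE in the VBox headers).  As a reminder, the image
 * starts like this (sketch of the first fields of the 64-bit image format
 * only; the 32-bit format stores the FPU IP/DP as sel:offset pairs):
 */
#if 0 /* illustrative sketch only, not part of the build */
typedef struct IEMEXAMPLEFXHDR
{
    uint16_t FCW;           /* +0x00: x87 control word */
    uint16_t FSW;           /* +0x02: x87 status word */
    uint8_t  FTW;           /* +0x04: abridged x87 tag word */
    uint8_t  bReserved;     /* +0x05 */
    uint16_t FOP;           /* +0x06: last x87 opcode */
    uint64_t FPUIP;         /* +0x08: last instruction pointer */
    uint64_t FPUDP;         /* +0x10: last data pointer */
    uint32_t MXCSR;         /* +0x18 */
    uint32_t MXCSR_MASK;    /* +0x1c: which MXCSR bits are valid */
    /* ST0..ST7/MM0..MM7 and the XMM registers follow. */
} IEMEXAMPLEFXHDR;
#endif
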
9231/**
9232 * @opmaps grp15
9233 * @opcode !11/2
9234 * @oppfx none
9235 * @opcpuid sse
9236 * @opgroup og_sse_mxcsrsm
9237 * @opxcpttype 5
9238 * @optest op1=0 -> mxcsr=0
9239 * @optest op1=0x2083 -> mxcsr=0x2083
9240 * @optest op1=0xfffffffe -> value.xcpt=0xd
9241 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9242 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9243 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9244 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9245 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9246 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9247 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9248 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9249 */
9250FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9251{
9252 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9253 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9254 return IEMOP_RAISE_INVALID_OPCODE();
9255
9256 IEM_MC_BEGIN(2, 0);
9257 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9258 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9261 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* LDMXCSR writes MXCSR, so the SSE state must be marked dirty. */
9262 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9263 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9264 IEM_MC_END();
9265 return VINF_SUCCESS;
9266}
9267
9268
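/*
 * The @optest op1=0xfffffffe -> value.xcpt=0xd entry above reflects that
 * LDMXCSR raises #GP(0) when reserved MXCSR bits are set.  That check
 * lives in iemCImpl_ldmxcsr rather than in this file; a minimal sketch of
 * it, assuming the common MXCSR_MASK value of 0xffff, would be:
 */
#if 0 /* illustrative sketch only, not part of the build */
static bool iemExampleIsMxcsrValid(uint32_t uNewMxCsr)
{
    uint32_t const fMxCsrMask = UINT32_C(0xffff);   /* normally taken from the FXSAVE image */
    return (uNewMxCsr & ~fMxCsrMask) == 0;          /* any reserved bit set -> #GP(0) */
}
#endif
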
9269/**
9270 * @opmaps grp15
9271 * @opcode !11/3
9272 * @oppfx none
9273 * @opcpuid sse
9274 * @opgroup og_sse_mxcsrsm
9275 * @opxcpttype 5
9276 * @optest mxcsr=0 -> op1=0
9277 * @optest mxcsr=0x2083 -> op1=0x2083
9278 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9279 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9280 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9281 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9282 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9283 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9284 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9285 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9286 */
9287FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9288{
9289 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9290 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9291 return IEMOP_RAISE_INVALID_OPCODE();
9292
9293 IEM_MC_BEGIN(2, 0);
9294 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9295 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9298 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9299 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9300 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9301 IEM_MC_END();
9302 return VINF_SUCCESS;
9303}
9304
9305
9306/**
9307 * @opmaps grp15
9308 * @opcode !11/4
9309 * @oppfx none
9310 * @opcpuid xsave
9311 * @opgroup og_system
9312 * @opxcpttype none
9313 */
9314FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9315{
9316 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9317 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9318 return IEMOP_RAISE_INVALID_OPCODE();
9319
9320 IEM_MC_BEGIN(3, 0);
9321 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9322 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9323 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9326 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9327 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9328 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9329 IEM_MC_END();
9330 return VINF_SUCCESS;
9331}
9332
9333
9334/**
9335 * @opmaps grp15
9336 * @opcode !11/5
9337 * @oppfx none
9338 * @opcpuid xsave
9339 * @opgroup og_system
9340 * @opxcpttype none
9341 */
9342FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9343{
9344 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9345 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9346 return IEMOP_RAISE_INVALID_OPCODE();
9347
9348 IEM_MC_BEGIN(3, 0);
9349 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9350 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9351 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9354 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* XRSTOR modifies the state, same as FXRSTOR above. */
9355 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9356 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9357 IEM_MC_END();
9358 return VINF_SUCCESS;
9359}
9360
9361/** Opcode 0x0f 0xae mem/6. */
9362FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9363
9364/**
9365 * @opmaps grp15
9366 * @opcode !11/7
9367 * @oppfx none
9368 * @opcpuid clfsh
9369 * @opgroup og_cachectl
9370 * @optest op1=1 ->
9371 */
9372FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9373{
9374 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9375 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9376 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9377
9378 IEM_MC_BEGIN(2, 0);
9379 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9380 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9383 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9384 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9385 IEM_MC_END();
9386 return VINF_SUCCESS;
9387}
9388
9389/**
9390 * @opmaps grp15
9391 * @opcode !11/7
9392 * @oppfx 0x66
9393 * @opcpuid clflushopt
9394 * @opgroup og_cachectl
9395 * @optest op1=1 ->
9396 */
9397FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9398{
9399 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9400 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9401 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9402
9403 IEM_MC_BEGIN(2, 0);
9404 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9405 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9408 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9409 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9410 IEM_MC_END();
9411 return VINF_SUCCESS;
9412}
9413
9414
9415/** Opcode 0x0f 0xae 11b/5. */
9416FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9417{
9418 RT_NOREF_PV(bRm);
9419 IEMOP_MNEMONIC(lfence, "lfence");
9420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9421 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9422 return IEMOP_RAISE_INVALID_OPCODE();
9423
9424 IEM_MC_BEGIN(0, 0);
9425#ifndef RT_ARCH_ARM64
9426 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9427#endif
9428 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9429#ifndef RT_ARCH_ARM64
9430 else
9431 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9432#endif
9433 IEM_MC_ADVANCE_RIP();
9434 IEM_MC_END();
9435 return VINF_SUCCESS;
9436}
9437
9438
9439/** Opcode 0x0f 0xae 11b/6. */
9440FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9441{
9442 RT_NOREF_PV(bRm);
9443 IEMOP_MNEMONIC(mfence, "mfence");
9444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9445 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9446 return IEMOP_RAISE_INVALID_OPCODE();
9447
9448 IEM_MC_BEGIN(0, 0);
9449#ifndef RT_ARCH_ARM64
9450 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9451#endif
9452 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9453#ifndef RT_ARCH_ARM64
9454 else
9455 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9456#endif
9457 IEM_MC_ADVANCE_RIP();
9458 IEM_MC_END();
9459 return VINF_SUCCESS;
9460}
9461
9462
9463/** Opcode 0x0f 0xae 11b/7. */
9464FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9465{
9466 RT_NOREF_PV(bRm);
9467 IEMOP_MNEMONIC(sfence, "sfence");
9468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9469 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9470 return IEMOP_RAISE_INVALID_OPCODE();
9471
9472 IEM_MC_BEGIN(0, 0);
9473#ifndef RT_ARCH_ARM64
9474 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9475#endif
9476 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9477#ifndef RT_ARCH_ARM64
9478 else
9479 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9480#endif
9481 IEM_MC_ADVANCE_RIP();
9482 IEM_MC_END();
9483 return VINF_SUCCESS;
9484}
9485
9486
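/*
 * On x86 hosts without SSE2 the three fence handlers above fall back to
 * iemAImpl_alt_mem_fence, which must provide a full barrier without the
 * *FENCE instructions.  The classic way of doing that (a sketch, not
 * necessarily the exact VBox implementation) is a locked read-modify-write:
 */
#if 0 /* illustrative sketch only, not part of the build */
static void iemExampleAltMemFence(void)
{
    int32_t volatile iDummy = 0;
    __asm__ __volatile__("lock; addl $0, %0" : "+m" (iDummy) : : "memory");
}
#endif
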
9487/** Opcode 0xf3 0x0f 0xae 11b/0. */
9488FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9489{
9490 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9492 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9493 {
9494 IEM_MC_BEGIN(1, 0);
9495 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9496 IEM_MC_ARG(uint64_t, u64Dst, 0);
9497 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9498 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9499 IEM_MC_ADVANCE_RIP();
9500 IEM_MC_END();
9501 }
9502 else
9503 {
9504 IEM_MC_BEGIN(1, 0);
9505 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9506 IEM_MC_ARG(uint32_t, u32Dst, 0);
9507 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9508 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9509 IEM_MC_ADVANCE_RIP();
9510 IEM_MC_END();
9511 }
9512 return VINF_SUCCESS;
9513}
9514
9515
9516/** Opcode 0xf3 0x0f 0xae 11b/1. */
9517FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9518{
9519 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9521 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9522 {
9523 IEM_MC_BEGIN(1, 0);
9524 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9525 IEM_MC_ARG(uint64_t, u64Dst, 0);
9526 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9527 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9528 IEM_MC_ADVANCE_RIP();
9529 IEM_MC_END();
9530 }
9531 else
9532 {
9533 IEM_MC_BEGIN(1, 0);
9534 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9535 IEM_MC_ARG(uint32_t, u32Dst, 0);
9536 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9537 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9538 IEM_MC_ADVANCE_RIP();
9539 IEM_MC_END();
9540 }
9541 return VINF_SUCCESS;
9542}
9543
9544
9545/** Opcode 0xf3 0x0f 0xae 11b/2. */
9546FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9547{
9548 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9550 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9551 {
9552 IEM_MC_BEGIN(1, 0);
9553 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9554 IEM_MC_ARG(uint64_t, u64Dst, 0);
9555 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9556 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9557 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9558 IEM_MC_ADVANCE_RIP();
9559 IEM_MC_END();
9560 }
9561 else
9562 {
9563 IEM_MC_BEGIN(1, 0);
9564 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9565 IEM_MC_ARG(uint32_t, u32Dst, 0);
9566 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9567 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9568 IEM_MC_ADVANCE_RIP();
9569 IEM_MC_END();
9570 }
9571 return VINF_SUCCESS;
9572}
9573
9574
9575/** Opcode 0xf3 0x0f 0xae 11b/3. */
9576FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9577{
9578 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9580 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9581 {
9582 IEM_MC_BEGIN(1, 0);
9583 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9584 IEM_MC_ARG(uint64_t, u64Dst, 0);
9585 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9586 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9587 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9588 IEM_MC_ADVANCE_RIP();
9589 IEM_MC_END();
9590 }
9591 else
9592 {
9593 IEM_MC_BEGIN(1, 0);
9594 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9595 IEM_MC_ARG(uint32_t, u32Dst, 0);
9596 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9597 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
9598 IEM_MC_ADVANCE_RIP();
9599 IEM_MC_END();
9600 }
9601 return VINF_SUCCESS;
9602}
9603
9604
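/*
 * All four RD/WRFSBASE and RD/WRGSBASE handlers above lean on
 * IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT: architecturally these instructions are
 * only valid in 64-bit mode with CR4.FSGSBASE set, everything else is #UD.
 * As a plain-C predicate (hypothetical helper):
 */
#if 0 /* illustrative sketch only, not part of the build */
static bool iemExampleFsGsBaseOk(bool fIn64BitMode, uint32_t uCr4)
{
    return fIn64BitMode && (uCr4 & X86_CR4_FSGSBASE) != 0;
}
#endif
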
9605/**
9606 * Group 15 jump table for register variant.
9607 */
9608IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
9609{ /* pfx: none, 066h, 0f3h, 0f2h */
9610 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
9611 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
9612 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
9613 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
9614 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9615 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9616 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9617 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9618};
9619AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
9620
9621
9622/**
9623 * Group 15 jump table for memory variant.
9624 */
9625IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
9626{ /* pfx: none, 066h, 0f3h, 0f2h */
9627 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9628 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9629 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9630 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9631 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9632 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9633 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9634 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9635};
9636AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
9637
9638
9639/** Opcode 0x0f 0xae. */
9640FNIEMOP_DEF(iemOp_Grp15)
9641{
9642 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
9643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9644 if (IEM_IS_MODRM_REG_MODE(bRm))
9645 /* register, register */
9646 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9647 + pVCpu->iem.s.idxPrefix], bRm);
9648 /* memory, register */
9649 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9650 + pVCpu->iem.s.idxPrefix], bRm);
9651}
9652
9653
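/*
 * The dispatcher above indexes both group 15 tables as modrm.reg * 4 plus
 * the prefix column (none/066h/0f3h/0f2h, i.e. pVCpu->iem.s.idxPrefix).
 * For example, F3 0F AE /0 in register mode resolves like this (sketch):
 */
#if 0 /* illustrative sketch only, not part of the build */
static PFNIEMOPRM iemExampleGrp15RegLookup(uint8_t bRm, uint8_t idxPrefix)
{
    /* bRm = 0xc0 (mod=3, reg=0), idxPrefix = 2 (0f3h) -> iemOp_Grp15_rdfsbase */
    return g_apfnGroup15RegReg[((bRm >> 3) & 7) * 4 + idxPrefix];
}
#endif
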
9654/** Opcode 0x0f 0xaf. */
9655FNIEMOP_DEF(iemOp_imul_Gv_Ev)
9656{
9657 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
9658 IEMOP_HLP_MIN_386();
9659 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9660 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
9661}
9662
9663
9664/** Opcode 0x0f 0xb0. */
9665FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
9666{
9667 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
9668 IEMOP_HLP_MIN_486();
9669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9670
9671 if (IEM_IS_MODRM_REG_MODE(bRm))
9672 {
9673 IEMOP_HLP_DONE_DECODING();
9674 IEM_MC_BEGIN(4, 0);
9675 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9676 IEM_MC_ARG(uint8_t *, pu8Al, 1);
9677 IEM_MC_ARG(uint8_t, u8Src, 2);
9678 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9679
9680 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9681 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9682 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
9683 IEM_MC_REF_EFLAGS(pEFlags);
9684 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9685 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
9686 else
9687 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
9688
9689 IEM_MC_ADVANCE_RIP();
9690 IEM_MC_END();
9691 }
9692 else
9693 {
9694 IEM_MC_BEGIN(4, 3);
9695 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9696 IEM_MC_ARG(uint8_t *, pu8Al, 1);
9697 IEM_MC_ARG(uint8_t, u8Src, 2);
9698 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9700 IEM_MC_LOCAL(uint8_t, u8Al);
9701
9702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9703 IEMOP_HLP_DONE_DECODING();
9704 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9705 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9706 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
9707 IEM_MC_FETCH_EFLAGS(EFlags);
9708 IEM_MC_REF_LOCAL(pu8Al, u8Al);
9709 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9710 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
9711 else
9712 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
9713
9714 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9715 IEM_MC_COMMIT_EFLAGS(EFlags);
9716 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
9717 IEM_MC_ADVANCE_RIP();
9718 IEM_MC_END();
9719 }
9720 return VINF_SUCCESS;
9721}
9722
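/*
 * A plain-C reference for the byte CMPXCHG both paths above emulate
 * (sketch; the real iemAImpl_cmpxchg_u8 worker also recomputes
 * CF/OF/SF/AF/PF exactly like a CMP would):
 */
#if 0 /* illustrative sketch only, not part of the build */
static void iemExampleCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
{
    if (*puDst == *puAl)
    {
        *pfEFlags |= X86_EFL_ZF;                /* equal: store the source operand */
        *puDst     = uSrc;
    }
    else
    {
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;     /* not equal: load the accumulator from the destination */
        *puAl      = *puDst;
    }
}
#endif
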
9723/** Opcode 0x0f 0xb1. */
9724FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
9725{
9726 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
9727 IEMOP_HLP_MIN_486();
9728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9729
9730 if (IEM_IS_MODRM_REG_MODE(bRm))
9731 {
9732 IEMOP_HLP_DONE_DECODING();
9733 switch (pVCpu->iem.s.enmEffOpSize)
9734 {
9735 case IEMMODE_16BIT:
9736 IEM_MC_BEGIN(4, 0);
9737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9738 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
9739 IEM_MC_ARG(uint16_t, u16Src, 2);
9740 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9741
9742 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9743 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9744 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
9745 IEM_MC_REF_EFLAGS(pEFlags);
9746 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9747 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
9748 else
9749 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
9750
9751 IEM_MC_ADVANCE_RIP();
9752 IEM_MC_END();
9753 return VINF_SUCCESS;
9754
9755 case IEMMODE_32BIT:
9756 IEM_MC_BEGIN(4, 0);
9757 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9758 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
9759 IEM_MC_ARG(uint32_t, u32Src, 2);
9760 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9761
9762 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9763 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9764 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
9765 IEM_MC_REF_EFLAGS(pEFlags);
9766 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9767 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
9768 else
9769 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
9770
9771 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9772 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9773 } IEM_MC_ELSE() {
9774 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
9775 } IEM_MC_ENDIF();
9776
9777 IEM_MC_ADVANCE_RIP();
9778 IEM_MC_END();
9779 return VINF_SUCCESS;
9780
9781 case IEMMODE_64BIT:
9782 IEM_MC_BEGIN(4, 0);
9783 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9784 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
9785#ifdef RT_ARCH_X86
9786 IEM_MC_ARG(uint64_t *, pu64Src, 2);
9787#else
9788 IEM_MC_ARG(uint64_t, u64Src, 2);
9789#endif
9790 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9791
9792 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9793 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
9794 IEM_MC_REF_EFLAGS(pEFlags);
9795#ifdef RT_ARCH_X86
9796 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9797 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9798 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
9799 else
9800 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
9801#else
9802 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9803 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9804 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
9805 else
9806 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
9807#endif
9808
9809 IEM_MC_ADVANCE_RIP();
9810 IEM_MC_END();
9811 return VINF_SUCCESS;
9812
9813 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9814 }
9815 }
9816 else
9817 {
9818 switch (pVCpu->iem.s.enmEffOpSize)
9819 {
9820 case IEMMODE_16BIT:
9821 IEM_MC_BEGIN(4, 3);
9822 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9823 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
9824 IEM_MC_ARG(uint16_t, u16Src, 2);
9825 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9827 IEM_MC_LOCAL(uint16_t, u16Ax);
9828
9829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9830 IEMOP_HLP_DONE_DECODING();
9831 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9832 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9833 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
9834 IEM_MC_FETCH_EFLAGS(EFlags);
9835 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
9836 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9837 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
9838 else
9839 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
9840
9841 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9842 IEM_MC_COMMIT_EFLAGS(EFlags);
9843 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
9844 IEM_MC_ADVANCE_RIP();
9845 IEM_MC_END();
9846 return VINF_SUCCESS;
9847
9848 case IEMMODE_32BIT:
9849 IEM_MC_BEGIN(4, 3);
9850 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9851 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
9852 IEM_MC_ARG(uint32_t, u32Src, 2);
9853 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9855 IEM_MC_LOCAL(uint32_t, u32Eax);
9856
9857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9858 IEMOP_HLP_DONE_DECODING();
9859 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9860 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9861 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
9862 IEM_MC_FETCH_EFLAGS(EFlags);
9863 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
9864 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9865 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
9866 else
9867 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
9868
9869 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9870 IEM_MC_COMMIT_EFLAGS(EFlags);
9871 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
9872 IEM_MC_ADVANCE_RIP();
9873 IEM_MC_END();
9874 return VINF_SUCCESS;
9875
9876 case IEMMODE_64BIT:
9877 IEM_MC_BEGIN(4, 3);
9878 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9879 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
9880#ifdef RT_ARCH_X86
9881 IEM_MC_ARG(uint64_t *, pu64Src, 2);
9882#else
9883 IEM_MC_ARG(uint64_t, u64Src, 2);
9884#endif
9885 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9887 IEM_MC_LOCAL(uint64_t, u64Rax);
9888
9889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9890 IEMOP_HLP_DONE_DECODING();
9891 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9892 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
9893 IEM_MC_FETCH_EFLAGS(EFlags);
9894 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
9895#ifdef RT_ARCH_X86
9896 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9897 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9898 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
9899 else
9900 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
9901#else
9902 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9903 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9904 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
9905 else
9906 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
9907#endif
9908
9909 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9910 IEM_MC_COMMIT_EFLAGS(EFlags);
9911 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
9912 IEM_MC_ADVANCE_RIP();
9913 IEM_MC_END();
9914 return VINF_SUCCESS;
9915
9916 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9917 }
9918 }
9919}
9920
9921
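/*
 * Note the IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) block in the 32-bit register
 * case above: writing a 32-bit GPR zero-extends to 64 bits, and CMPXCHG
 * only writes one of the two candidate registers, the destination on
 * success (ZF=1) and (R|E)AX on failure.  The register commit step is thus
 * roughly (sketch, hypothetical helper):
 */
#if 0 /* illustrative sketch only, not part of the build */
static void iemExampleCmpXchg32Commit(uint64_t *pauGRegs, unsigned iRegDst, bool fZF)
{
    unsigned const iRegWritten = fZF ? iRegDst : 0 /* xAX */;
    pauGRegs[iRegWritten] &= UINT32_MAX;    /* zap the high half of the written register only */
}
#endif
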
9922FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
9923{
9924 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
9925 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
9926
9927 switch (pVCpu->iem.s.enmEffOpSize)
9928 {
9929 case IEMMODE_16BIT:
9930 IEM_MC_BEGIN(5, 1);
9931 IEM_MC_ARG(uint16_t, uSel, 0);
9932 IEM_MC_ARG(uint16_t, offSeg, 1);
9933 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
9934 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
9935 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
9936 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
9937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9939 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9940 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
9941 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
9942 IEM_MC_END();
9943 return VINF_SUCCESS;
9944
9945 case IEMMODE_32BIT:
9946 IEM_MC_BEGIN(5, 1);
9947 IEM_MC_ARG(uint16_t, uSel, 0);
9948 IEM_MC_ARG(uint32_t, offSeg, 1);
9949 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
9950 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
9951 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
9952 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
9953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9955 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9956 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
9957 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
9958 IEM_MC_END();
9959 return VINF_SUCCESS;
9960
9961 case IEMMODE_64BIT:
9962 IEM_MC_BEGIN(5, 1);
9963 IEM_MC_ARG(uint16_t, uSel, 0);
9964 IEM_MC_ARG(uint64_t, offSeg, 1);
9965 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
9966 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
9967 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
9968 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
9969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9971 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
9972 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9973 else
9974 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
9975 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
9976 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
9977 IEM_MC_END();
9978 return VINF_SUCCESS;
9979
9980 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9981 }
9982}
9983
9984
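/*
 * The worker above loads a far pointer (Mp): offset first, selector word
 * immediately after it (at +2/+4/+8 depending on the operand size).  For
 * the 32-bit case the memory image it fetches is (sketch):
 */
#if 0 /* illustrative sketch only, not part of the build */
#pragma pack(1)
typedef struct IEMEXAMPLEFARPTR32
{
    uint32_t offSeg;    /* IEM_MC_FETCH_MEM_U32 at +0 */
    uint16_t uSel;      /* IEM_MC_FETCH_MEM_U16_DISP at +4 */
} IEMEXAMPLEFARPTR32;
#pragma pack()
#endif
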
9985/** Opcode 0x0f 0xb2. */
9986FNIEMOP_DEF(iemOp_lss_Gv_Mp)
9987{
9988 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
9989 IEMOP_HLP_MIN_386();
9990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9991 if (IEM_IS_MODRM_REG_MODE(bRm))
9992 return IEMOP_RAISE_INVALID_OPCODE();
9993 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
9994}
9995
9996
9997/** Opcode 0x0f 0xb3. */
9998FNIEMOP_DEF(iemOp_btr_Ev_Gv)
9999{
10000 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10001 IEMOP_HLP_MIN_386();
10002 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
10003}
10004
10005
10006/** Opcode 0x0f 0xb4. */
10007FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10008{
10009 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10010 IEMOP_HLP_MIN_386();
10011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10012 if (IEM_IS_MODRM_REG_MODE(bRm))
10013 return IEMOP_RAISE_INVALID_OPCODE();
10014 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10015}
10016
10017
10018/** Opcode 0x0f 0xb5. */
10019FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10020{
10021 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10022 IEMOP_HLP_MIN_386();
10023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10024 if (IEM_IS_MODRM_REG_MODE(bRm))
10025 return IEMOP_RAISE_INVALID_OPCODE();
10026 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10027}
10028
10029
10030/** Opcode 0x0f 0xb6. */
10031FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10032{
10033 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10034 IEMOP_HLP_MIN_386();
10035
10036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10037
10038 /*
10039 * If rm is denoting a register, no more instruction bytes.
10040 */
10041 if (IEM_IS_MODRM_REG_MODE(bRm))
10042 {
10043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10044 switch (pVCpu->iem.s.enmEffOpSize)
10045 {
10046 case IEMMODE_16BIT:
10047 IEM_MC_BEGIN(0, 1);
10048 IEM_MC_LOCAL(uint16_t, u16Value);
10049 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10050 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10051 IEM_MC_ADVANCE_RIP();
10052 IEM_MC_END();
10053 return VINF_SUCCESS;
10054
10055 case IEMMODE_32BIT:
10056 IEM_MC_BEGIN(0, 1);
10057 IEM_MC_LOCAL(uint32_t, u32Value);
10058 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10059 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10060 IEM_MC_ADVANCE_RIP();
10061 IEM_MC_END();
10062 return VINF_SUCCESS;
10063
10064 case IEMMODE_64BIT:
10065 IEM_MC_BEGIN(0, 1);
10066 IEM_MC_LOCAL(uint64_t, u64Value);
10067 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10068 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10069 IEM_MC_ADVANCE_RIP();
10070 IEM_MC_END();
10071 return VINF_SUCCESS;
10072
10073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10074 }
10075 }
10076 else
10077 {
10078 /*
10079 * We're loading a register from memory.
10080 */
10081 switch (pVCpu->iem.s.enmEffOpSize)
10082 {
10083 case IEMMODE_16BIT:
10084 IEM_MC_BEGIN(0, 2);
10085 IEM_MC_LOCAL(uint16_t, u16Value);
10086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10089 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10090 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10091 IEM_MC_ADVANCE_RIP();
10092 IEM_MC_END();
10093 return VINF_SUCCESS;
10094
10095 case IEMMODE_32BIT:
10096 IEM_MC_BEGIN(0, 2);
10097 IEM_MC_LOCAL(uint32_t, u32Value);
10098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10101 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10102 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10103 IEM_MC_ADVANCE_RIP();
10104 IEM_MC_END();
10105 return VINF_SUCCESS;
10106
10107 case IEMMODE_64BIT:
10108 IEM_MC_BEGIN(0, 2);
10109 IEM_MC_LOCAL(uint64_t, u64Value);
10110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10113 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10114 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10115 IEM_MC_ADVANCE_RIP();
10116 IEM_MC_END();
10117 return VINF_SUCCESS;
10118
10119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10120 }
10121 }
10122}
10123
10124
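/*
 * MOVZX always writes the full destination width, and in 64-bit mode the
 * 32-bit forms above rely on IEM_MC_STORE_GREG_U32 implicitly clearing
 * bits 63:32, the standard x86-64 behavior for any 32-bit GPR write.  In
 * plain C the widest register form is simply (sketch):
 */
#if 0 /* illustrative sketch only, not part of the build */
static uint64_t iemExampleMovzxU8ToU64(uint8_t uSrc)
{
    return uSrc;    /* implicit zero extension; no flags are modified */
}
#endif
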
10125/** Opcode 0x0f 0xb7. */
10126FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10127{
10128 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10129 IEMOP_HLP_MIN_386();
10130
10131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10132
10133 /** @todo Not entirely sure how the operand size prefix is handled here,
10134 * assuming that it will be ignored. Would be nice to have a few
10135 * tests for this. */
10136 /*
10137 * If rm is denoting a register, no more instruction bytes.
10138 */
10139 if (IEM_IS_MODRM_REG_MODE(bRm))
10140 {
10141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10142 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10143 {
10144 IEM_MC_BEGIN(0, 1);
10145 IEM_MC_LOCAL(uint32_t, u32Value);
10146 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10147 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10148 IEM_MC_ADVANCE_RIP();
10149 IEM_MC_END();
10150 }
10151 else
10152 {
10153 IEM_MC_BEGIN(0, 1);
10154 IEM_MC_LOCAL(uint64_t, u64Value);
10155 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10156 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10157 IEM_MC_ADVANCE_RIP();
10158 IEM_MC_END();
10159 }
10160 }
10161 else
10162 {
10163 /*
10164 * We're loading a register from memory.
10165 */
10166 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10167 {
10168 IEM_MC_BEGIN(0, 2);
10169 IEM_MC_LOCAL(uint32_t, u32Value);
10170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10173 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10174 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10175 IEM_MC_ADVANCE_RIP();
10176 IEM_MC_END();
10177 }
10178 else
10179 {
10180 IEM_MC_BEGIN(0, 2);
10181 IEM_MC_LOCAL(uint64_t, u64Value);
10182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10185 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10186 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10187 IEM_MC_ADVANCE_RIP();
10188 IEM_MC_END();
10189 }
10190 }
10191 return VINF_SUCCESS;
10192}
10193
10194
10195/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10196FNIEMOP_UD_STUB(iemOp_jmpe);
10197
10198
10199/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10200FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10201{
10202 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10203 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10204 return iemOp_InvalidNeedRM(pVCpu);
10205#ifndef TST_IEM_CHECK_MC
10206# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10207 static const IEMOPBINSIZES s_Native =
10208 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10209# endif
10210 static const IEMOPBINSIZES s_Fallback =
10211 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10212#endif
10213 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
10214}
10215
10216
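/*
 * The _fallback workers selected above have to count bits on hosts without
 * the POPCNT instruction.  A classic branch-free way to do that for 32
 * bits (a sketch; the actual iemAImpl_popcnt_u32_fallback may differ) is
 * the SWAR reduction:
 */
#if 0 /* illustrative sketch only, not part of the build */
static uint32_t iemExamplePopCntU32(uint32_t uVal)
{
    uVal = uVal - ((uVal >> 1) & UINT32_C(0x55555555));                           /* 2-bit sums */
    uVal = (uVal & UINT32_C(0x33333333)) + ((uVal >> 2) & UINT32_C(0x33333333));  /* 4-bit sums */
    uVal = (uVal + (uVal >> 4)) & UINT32_C(0x0f0f0f0f);                           /* 8-bit sums */
    return (uVal * UINT32_C(0x01010101)) >> 24;                                   /* add the four bytes */
}
#endif
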
10217/**
10218 * @opcode 0xb9
10219 * @opinvalid intel-modrm
10220 * @optest ->
10221 */
10222FNIEMOP_DEF(iemOp_Grp10)
10223{
10224 /*
10225 * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the
10226 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10227 */
10228 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10229 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10230 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10231}
10232
10233
10234/** Opcode 0x0f 0xba. */
10235FNIEMOP_DEF(iemOp_Grp8)
10236{
10237 IEMOP_HLP_MIN_386();
10238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10239 PCIEMOPBINSIZES pImpl;
10240 switch (IEM_GET_MODRM_REG_8(bRm))
10241 {
10242 case 0: case 1: case 2: case 3:
10243 /* Both AMD and Intel want full modr/m decoding and imm8. */
10244 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10245 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
10246 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
10247 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
10248 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
10249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10250 }
10251 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10252
10253 if (IEM_IS_MODRM_REG_MODE(bRm))
10254 {
10255 /* register destination. */
10256 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10258
10259 switch (pVCpu->iem.s.enmEffOpSize)
10260 {
10261 case IEMMODE_16BIT:
10262 IEM_MC_BEGIN(3, 0);
10263 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10264 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
10265 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10266
10267 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10268 IEM_MC_REF_EFLAGS(pEFlags);
10269 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10270
10271 IEM_MC_ADVANCE_RIP();
10272 IEM_MC_END();
10273 return VINF_SUCCESS;
10274
10275 case IEMMODE_32BIT:
10276 IEM_MC_BEGIN(3, 0);
10277 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10278 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
10279 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10280
10281 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10282 IEM_MC_REF_EFLAGS(pEFlags);
10283 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10284
10285 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10286 IEM_MC_ADVANCE_RIP();
10287 IEM_MC_END();
10288 return VINF_SUCCESS;
10289
10290 case IEMMODE_64BIT:
10291 IEM_MC_BEGIN(3, 0);
10292 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10293 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
10294 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10295
10296 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10297 IEM_MC_REF_EFLAGS(pEFlags);
10298 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10299
10300 IEM_MC_ADVANCE_RIP();
10301 IEM_MC_END();
10302 return VINF_SUCCESS;
10303
10304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10305 }
10306 }
10307 else
10308 {
10309 /* memory destination. */
10310
10311 uint32_t fAccess;
10312 if (pImpl->pfnLockedU16)
10313 fAccess = IEM_ACCESS_DATA_RW;
10314 else /* BT */
10315 fAccess = IEM_ACCESS_DATA_R;
10316
10317 /** @todo test negative bit offsets! */
10318 switch (pVCpu->iem.s.enmEffOpSize)
10319 {
10320 case IEMMODE_16BIT:
10321 IEM_MC_BEGIN(3, 1);
10322 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10323 IEM_MC_ARG(uint16_t, u16Src, 1);
10324 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10326
10327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10328 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10329 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
10330 if (pImpl->pfnLockedU16)
10331 IEMOP_HLP_DONE_DECODING();
10332 else
10333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10334 IEM_MC_FETCH_EFLAGS(EFlags);
10335 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10336 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10337 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10338 else
10339 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10340 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10341
10342 IEM_MC_COMMIT_EFLAGS(EFlags);
10343 IEM_MC_ADVANCE_RIP();
10344 IEM_MC_END();
10345 return VINF_SUCCESS;
10346
10347 case IEMMODE_32BIT:
10348 IEM_MC_BEGIN(3, 1);
10349 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10350 IEM_MC_ARG(uint32_t, u32Src, 1);
10351 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10353
10354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10355 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10356 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
10357 if (pImpl->pfnLockedU16)
10358 IEMOP_HLP_DONE_DECODING();
10359 else
10360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10361 IEM_MC_FETCH_EFLAGS(EFlags);
10362 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10363 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10364 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10365 else
10366 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10367 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10368
10369 IEM_MC_COMMIT_EFLAGS(EFlags);
10370 IEM_MC_ADVANCE_RIP();
10371 IEM_MC_END();
10372 return VINF_SUCCESS;
10373
10374 case IEMMODE_64BIT:
10375 IEM_MC_BEGIN(3, 1);
10376 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10377 IEM_MC_ARG(uint64_t, u64Src, 1);
10378 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10380
10381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10382 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10383 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
10384 if (pImpl->pfnLockedU16)
10385 IEMOP_HLP_DONE_DECODING();
10386 else
10387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10388 IEM_MC_FETCH_EFLAGS(EFlags);
10389 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10390 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10391 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10392 else
10393 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10394 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10395
10396 IEM_MC_COMMIT_EFLAGS(EFlags);
10397 IEM_MC_ADVANCE_RIP();
10398 IEM_MC_END();
10399 return VINF_SUCCESS;
10400
10401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10402 }
10403 }
10404}
10405
10406
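/*
 * For the immediate forms above the bit offset is simply masked to the
 * operand width (u8Bit & 15/31/63), so unlike the register-offset forms no
 * displacement beyond the addressed unit can occur.  The BT family core in
 * plain C, 32-bit BTC flavor (sketch; flags other than CF untouched):
 */
#if 0 /* illustrative sketch only, not part of the build */
static void iemExampleBtcU32(uint32_t *puDst, uint32_t uBit, uint32_t *pfEFlags)
{
    uint32_t const fMask = UINT32_C(1) << (uBit & 31);
    *pfEFlags = (*pfEFlags & ~(uint32_t)X86_EFL_CF) | ((*puDst & fMask) ? X86_EFL_CF : 0);
    *puDst   ^= fMask;  /* BTS would OR it in, BTR would mask it out, BT only tests */
}
#endif
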
10407/** Opcode 0x0f 0xbb. */
10408FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10409{
10410 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10411 IEMOP_HLP_MIN_386();
10412 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
10413}
10414
10415
10416/**
10417 * Common worker for BSF and BSR instructions.
10418 *
10419 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10420 * the destination register, which means that for 32-bit operations the high
10421 * bits must be left alone.
10422 *
10423 * @param pImpl Pointer to the instruction implementation (assembly).
10424 */
10425FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10426{
10427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10428
10429 /*
10430 * If rm is denoting a register, no more instruction bytes.
10431 */
10432 if (IEM_IS_MODRM_REG_MODE(bRm))
10433 {
10434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10435 switch (pVCpu->iem.s.enmEffOpSize)
10436 {
10437 case IEMMODE_16BIT:
10438 IEM_MC_BEGIN(3, 0);
10439 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10440 IEM_MC_ARG(uint16_t, u16Src, 1);
10441 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10442
10443 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10444 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10445 IEM_MC_REF_EFLAGS(pEFlags);
10446 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10447
10448 IEM_MC_ADVANCE_RIP();
10449 IEM_MC_END();
10450 break;
10451
10452 case IEMMODE_32BIT:
10453 IEM_MC_BEGIN(3, 0);
10454 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10455 IEM_MC_ARG(uint32_t, u32Src, 1);
10456 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10457
10458 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10459 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10460 IEM_MC_REF_EFLAGS(pEFlags);
10461 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10462 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10463 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10464 IEM_MC_ENDIF();
10465 IEM_MC_ADVANCE_RIP();
10466 IEM_MC_END();
10467 break;
10468
10469 case IEMMODE_64BIT:
10470 IEM_MC_BEGIN(3, 0);
10471 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10472 IEM_MC_ARG(uint64_t, u64Src, 1);
10473 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10474
10475 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10476 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10477 IEM_MC_REF_EFLAGS(pEFlags);
10478 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10479
10480 IEM_MC_ADVANCE_RIP();
10481 IEM_MC_END();
10482 break;
10483 }
10484 }
10485 else
10486 {
10487 /*
10488 * We're accessing memory.
10489 */
10490 switch (pVCpu->iem.s.enmEffOpSize)
10491 {
10492 case IEMMODE_16BIT:
10493 IEM_MC_BEGIN(3, 1);
10494 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10495 IEM_MC_ARG(uint16_t, u16Src, 1);
10496 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10498
10499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10501 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10502 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10503 IEM_MC_REF_EFLAGS(pEFlags);
10504 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10505
10506 IEM_MC_ADVANCE_RIP();
10507 IEM_MC_END();
10508 break;
10509
10510 case IEMMODE_32BIT:
10511 IEM_MC_BEGIN(3, 1);
10512 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10513 IEM_MC_ARG(uint32_t, u32Src, 1);
10514 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10516
10517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10519 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10520 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10521 IEM_MC_REF_EFLAGS(pEFlags);
10522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10523
10524 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10525 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10526 IEM_MC_ENDIF();
10527 IEM_MC_ADVANCE_RIP();
10528 IEM_MC_END();
10529 break;
10530
10531 case IEMMODE_64BIT:
10532 IEM_MC_BEGIN(3, 1);
10533 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10534 IEM_MC_ARG(uint64_t, u64Src, 1);
10535 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10537
10538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10540 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10541 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10542 IEM_MC_REF_EFLAGS(pEFlags);
10543 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10544
10545 IEM_MC_ADVANCE_RIP();
10546 IEM_MC_END();
10547 break;
10548 }
10549 }
10550 return VINF_SUCCESS;
10551}
10552
10553
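/*
 * The reason the worker above conditions the 32-bit high-half clearing on
 * ZF is that BSF/BSR leave the destination unmodified when the source is
 * zero (documented by AMD, undefined but observed on Intel) and only set
 * ZF.  A plain-C reference for 32-bit BSF (sketch):
 */
#if 0 /* illustrative sketch only, not part of the build */
static void iemExampleBsfU32(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
{
    if (uSrc)
    {
        unsigned iBit = 0;
        while (!(uSrc & (UINT32_C(1) << iBit)))
            iBit++;                             /* find the lowest set bit */
        *puDst = iBit;
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
    }
    else
        *pfEFlags |= X86_EFL_ZF;                /* destination left untouched */
}
#endif
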
10554/** Opcode 0x0f 0xbc. */
10555FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
10556{
10557 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
10558 IEMOP_HLP_MIN_386();
10559 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10560 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
10561}
10562
10563
10564/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
10565FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
10566{
10567 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10568 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
10569 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10570
10571#ifndef TST_IEM_CHECK_MC
10572 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
10573 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
10574 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
10575 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
10576 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
10577 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
10578 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
10579 {
10580 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
10581 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
10582 };
10583#endif
10584 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10585 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10586 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10587}
10588
10589
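/*
 * Unlike BSF, TZCNT is well defined for a zero source (it returns the
 * operand width and sets CF), which is one reason the tables above have to
 * distinguish Intel and AMD flag behavior.  Sketch of the 32-bit
 * semantics (hypothetical helper; ZF handling elided):
 */
#if 0 /* illustrative sketch only, not part of the build */
static uint32_t iemExampleTzCntU32(uint32_t uSrc, uint32_t *pfEFlags)
{
    if (!uSrc)
    {
        *pfEFlags |= X86_EFL_CF;    /* CF is set when the source is zero */
        return 32;
    }
    uint32_t cZeros = 0;
    while (!(uSrc & 1))
    {
        uSrc >>= 1;
        cZeros++;
    }
    *pfEFlags &= ~(uint32_t)X86_EFL_CF;
    return cZeros;                  /* ZF would be set when the result is zero */
}
#endif
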
10590/** Opcode 0x0f 0xbd. */
10591FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
10592{
10593 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
10594 IEMOP_HLP_MIN_386();
10595 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10596 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
10597}
10598
10599
10600/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
10601FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
10602{
10603 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10604 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
10605 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10606
10607#ifndef TST_IEM_CHECK_MC
10608 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
10609 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
10610 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
10611 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
10612 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
10613 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
10614 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
10615 {
10616 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
10617 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
10618 };
10619#endif
10620 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10621 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10622 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10623}
10624
10625
10626
10627/** Opcode 0x0f 0xbe. */
10628FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
10629{
10630 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
10631 IEMOP_HLP_MIN_386();
10632
10633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10634
10635 /*
10636 * If rm is denoting a register, no more instruction bytes.
10637 */
10638 if (IEM_IS_MODRM_REG_MODE(bRm))
10639 {
10640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10641 switch (pVCpu->iem.s.enmEffOpSize)
10642 {
10643 case IEMMODE_16BIT:
10644 IEM_MC_BEGIN(0, 1);
10645 IEM_MC_LOCAL(uint16_t, u16Value);
10646 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10647 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10648 IEM_MC_ADVANCE_RIP();
10649 IEM_MC_END();
10650 return VINF_SUCCESS;
10651
10652 case IEMMODE_32BIT:
10653 IEM_MC_BEGIN(0, 1);
10654 IEM_MC_LOCAL(uint32_t, u32Value);
10655 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10656 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10657 IEM_MC_ADVANCE_RIP();
10658 IEM_MC_END();
10659 return VINF_SUCCESS;
10660
10661 case IEMMODE_64BIT:
10662 IEM_MC_BEGIN(0, 1);
10663 IEM_MC_LOCAL(uint64_t, u64Value);
10664 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10665 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10666 IEM_MC_ADVANCE_RIP();
10667 IEM_MC_END();
10668 return VINF_SUCCESS;
10669
10670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10671 }
10672 }
10673 else
10674 {
10675 /*
10676 * We're loading a register from memory.
10677 */
10678 switch (pVCpu->iem.s.enmEffOpSize)
10679 {
10680 case IEMMODE_16BIT:
10681 IEM_MC_BEGIN(0, 2);
10682 IEM_MC_LOCAL(uint16_t, u16Value);
10683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10686 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10687 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10688 IEM_MC_ADVANCE_RIP();
10689 IEM_MC_END();
10690 return VINF_SUCCESS;
10691
10692 case IEMMODE_32BIT:
10693 IEM_MC_BEGIN(0, 2);
10694 IEM_MC_LOCAL(uint32_t, u32Value);
10695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10698 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10699 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10700 IEM_MC_ADVANCE_RIP();
10701 IEM_MC_END();
10702 return VINF_SUCCESS;
10703
10704 case IEMMODE_64BIT:
10705 IEM_MC_BEGIN(0, 2);
10706 IEM_MC_LOCAL(uint64_t, u64Value);
10707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10710 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10711 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10712 IEM_MC_ADVANCE_RIP();
10713 IEM_MC_END();
10714 return VINF_SUCCESS;
10715
10716 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10717 }
10718 }
10719}
10720
10721
10722/** Opcode 0x0f 0xbf. */
10723FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
10724{
10725 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
10726 IEMOP_HLP_MIN_386();
10727
10728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10729
10730 /** @todo Not entirely sure how the operand size prefix is handled here,
10731 * assuming that it will be ignored. Would be nice to have a few
10732 * tests for this. */
10733 /*
10734 * If rm is denoting a register, no more instruction bytes.
10735 */
10736 if (IEM_IS_MODRM_REG_MODE(bRm))
10737 {
10738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10739 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10740 {
10741 IEM_MC_BEGIN(0, 1);
10742 IEM_MC_LOCAL(uint32_t, u32Value);
10743 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10744 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10745 IEM_MC_ADVANCE_RIP();
10746 IEM_MC_END();
10747 }
10748 else
10749 {
10750 IEM_MC_BEGIN(0, 1);
10751 IEM_MC_LOCAL(uint64_t, u64Value);
10752 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10753 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10754 IEM_MC_ADVANCE_RIP();
10755 IEM_MC_END();
10756 }
10757 }
10758 else
10759 {
10760 /*
10761 * We're loading a register from memory.
10762 */
10763 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10764 {
10765 IEM_MC_BEGIN(0, 2);
10766 IEM_MC_LOCAL(uint32_t, u32Value);
10767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10770 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10771 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10772 IEM_MC_ADVANCE_RIP();
10773 IEM_MC_END();
10774 }
10775 else
10776 {
10777 IEM_MC_BEGIN(0, 2);
10778 IEM_MC_LOCAL(uint64_t, u64Value);
10779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10782 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10783 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10784 IEM_MC_ADVANCE_RIP();
10785 IEM_MC_END();
10786 }
10787 }
10788 return VINF_SUCCESS;
10789}
10790
10791
10792/** Opcode 0x0f 0xc0. */
10793FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
10794{
10795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10796 IEMOP_HLP_MIN_486();
10797 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
10798
10799 /*
10800 * If rm is denoting a register, no more instruction bytes.
10801 */
10802 if (IEM_IS_MODRM_REG_MODE(bRm))
10803 {
10804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10805
10806 IEM_MC_BEGIN(3, 0);
10807 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10808 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10809 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10810
10811 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10812 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10813 IEM_MC_REF_EFLAGS(pEFlags);
10814 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
10815
10816 IEM_MC_ADVANCE_RIP();
10817 IEM_MC_END();
10818 }
10819 else
10820 {
10821 /*
10822 * We're accessing memory.
10823 */
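    /* XADD exchanges the register operand with the memory operand and leaves
       the sum in memory: the worker returns the original memory value via
       pu8Reg, and we store that copy to the guest register after committing. */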
10824 IEM_MC_BEGIN(3, 3);
10825 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10826 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10827 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10828 IEM_MC_LOCAL(uint8_t, u8RegCopy);
10829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10830
10831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10832 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10833 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10834 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
10835 IEM_MC_FETCH_EFLAGS(EFlags);
10836 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10837 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
10838 else
10839 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
10840
10841 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10842 IEM_MC_COMMIT_EFLAGS(EFlags);
10843 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
10844 IEM_MC_ADVANCE_RIP();
10845 IEM_MC_END();
10846 return VINF_SUCCESS;
10847 }
10848 return VINF_SUCCESS;
10849}
10850
10851
10852/** Opcode 0x0f 0xc1. */
10853FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
10854{
10855 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
10856 IEMOP_HLP_MIN_486();
10857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10858
10859 /*
10860 * If rm is denoting a register, no more instruction bytes.
10861 */
10862 if (IEM_IS_MODRM_REG_MODE(bRm))
10863 {
10864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10865
10866 switch (pVCpu->iem.s.enmEffOpSize)
10867 {
10868 case IEMMODE_16BIT:
10869 IEM_MC_BEGIN(3, 0);
10870 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10871 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10872 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10873
10874 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10875 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10876 IEM_MC_REF_EFLAGS(pEFlags);
10877 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
10878
10879 IEM_MC_ADVANCE_RIP();
10880 IEM_MC_END();
10881 return VINF_SUCCESS;
10882
10883 case IEMMODE_32BIT:
10884 IEM_MC_BEGIN(3, 0);
10885 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10886 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10887 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10888
10889 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10890 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10891 IEM_MC_REF_EFLAGS(pEFlags);
10892 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
10893
10894 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10895 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10896 IEM_MC_ADVANCE_RIP();
10897 IEM_MC_END();
10898 return VINF_SUCCESS;
10899
10900 case IEMMODE_64BIT:
10901 IEM_MC_BEGIN(3, 0);
10902 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10903 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10904 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10905
10906 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10907 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10908 IEM_MC_REF_EFLAGS(pEFlags);
10909 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
10910
10911 IEM_MC_ADVANCE_RIP();
10912 IEM_MC_END();
10913 return VINF_SUCCESS;
10914
10915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10916 }
10917 }
10918 else
10919 {
10920 /*
10921 * We're accessing memory.
10922 */
10923 switch (pVCpu->iem.s.enmEffOpSize)
10924 {
10925 case IEMMODE_16BIT:
10926 IEM_MC_BEGIN(3, 3);
10927 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10928 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10929 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10930 IEM_MC_LOCAL(uint16_t, u16RegCopy);
10931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10932
10933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10934 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10935 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10936 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
10937 IEM_MC_FETCH_EFLAGS(EFlags);
10938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10939 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
10940 else
10941 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
10942
10943 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10944 IEM_MC_COMMIT_EFLAGS(EFlags);
10945 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
10946 IEM_MC_ADVANCE_RIP();
10947 IEM_MC_END();
10948 return VINF_SUCCESS;
10949
10950 case IEMMODE_32BIT:
10951 IEM_MC_BEGIN(3, 3);
10952 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10953 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10954 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10955 IEM_MC_LOCAL(uint32_t, u32RegCopy);
10956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10957
10958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10959 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10960 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10961 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
10962 IEM_MC_FETCH_EFLAGS(EFlags);
10963 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10964 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
10965 else
10966 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
10967
10968 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10969 IEM_MC_COMMIT_EFLAGS(EFlags);
10970 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
10971 IEM_MC_ADVANCE_RIP();
10972 IEM_MC_END();
10973 return VINF_SUCCESS;
10974
10975 case IEMMODE_64BIT:
10976 IEM_MC_BEGIN(3, 3);
10977 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10978 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10979 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10980 IEM_MC_LOCAL(uint64_t, u64RegCopy);
10981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10982
10983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10984 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10985 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10986 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
10987 IEM_MC_FETCH_EFLAGS(EFlags);
10988 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10989 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
10990 else
10991 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
10992
10993 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10994 IEM_MC_COMMIT_EFLAGS(EFlags);
10995 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
10996 IEM_MC_ADVANCE_RIP();
10997 IEM_MC_END();
10998 return VINF_SUCCESS;
10999
11000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11001 }
11002 }
11003}
11004
11005
11006/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11007FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11008{
11009 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
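    /* imm8 selects the compare predicate: 0=EQ, 1=LT, 2=LE, 3=UNORD, 4=NEQ, 5=NLT, 6=NLE, 7=ORD. */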
11010
11011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11012 if (IEM_IS_MODRM_REG_MODE(bRm))
11013 {
11014 /*
11015 * Register, register.
11016 */
11017 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11019 IEM_MC_BEGIN(4, 2);
11020 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11021 IEM_MC_LOCAL(X86XMMREG, Dst);
11022 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11023 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11024 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11025 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11026 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11027 IEM_MC_PREPARE_SSE_USAGE();
11028 IEM_MC_REF_MXCSR(pfMxcsr);
11029 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11030 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11031 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11032 IEM_MC_IF_MXCSR_XCPT_PENDING()
11033 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11034 IEM_MC_ELSE()
11035 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11036 IEM_MC_ENDIF();
11037
11038 IEM_MC_ADVANCE_RIP();
11039 IEM_MC_END();
11040 }
11041 else
11042 {
11043 /*
11044 * Register, memory.
11045 */
11046 IEM_MC_BEGIN(4, 3);
11047 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11048 IEM_MC_LOCAL(X86XMMREG, Dst);
11049 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11050 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11051 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11053
11054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11055 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11056 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11058 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11059 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11060
11061 IEM_MC_PREPARE_SSE_USAGE();
11062 IEM_MC_REF_MXCSR(pfMxcsr);
11063 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11064 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11065 IEM_MC_IF_MXCSR_XCPT_PENDING()
11066 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11067 IEM_MC_ELSE()
11068 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11069 IEM_MC_ENDIF();
11070
11071 IEM_MC_ADVANCE_RIP();
11072 IEM_MC_END();
11073 }
11074 return VINF_SUCCESS;
11075}
11076
11077
11078/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11079FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11080{
11081 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11082
11083 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11084 if (IEM_IS_MODRM_REG_MODE(bRm))
11085 {
11086 /*
11087 * Register, register.
11088 */
11089 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11091 IEM_MC_BEGIN(4, 2);
11092 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11093 IEM_MC_LOCAL(X86XMMREG, Dst);
11094 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11095 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11096 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11097 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11098 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11099 IEM_MC_PREPARE_SSE_USAGE();
11100 IEM_MC_REF_MXCSR(pfMxcsr);
11101 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11102 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11103 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11104 IEM_MC_IF_MXCSR_XCPT_PENDING()
11105 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11106 IEM_MC_ELSE()
11107 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11108 IEM_MC_ENDIF();
11109
11110 IEM_MC_ADVANCE_RIP();
11111 IEM_MC_END();
11112 }
11113 else
11114 {
11115 /*
11116 * Register, memory.
11117 */
11118 IEM_MC_BEGIN(4, 3);
11119 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11120 IEM_MC_LOCAL(X86XMMREG, Dst);
11121 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11122 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11123 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11125
11126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11127 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11128 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11130 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11131 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11132
11133 IEM_MC_PREPARE_SSE_USAGE();
11134 IEM_MC_REF_MXCSR(pfMxcsr);
11135 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11136 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11137 IEM_MC_IF_MXCSR_XCPT_PENDING()
11138 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11139 IEM_MC_ELSE()
11140 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11141 IEM_MC_ENDIF();
11142
11143 IEM_MC_ADVANCE_RIP();
11144 IEM_MC_END();
11145 }
11146 return VINF_SUCCESS;
11147}
11148
11149
11150/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11151FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11152{
11153 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11154
11155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11156 if (IEM_IS_MODRM_REG_MODE(bRm))
11157 {
11158 /*
11159 * Register, register.
11160 */
11161 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11163 IEM_MC_BEGIN(4, 2);
11164 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11165 IEM_MC_LOCAL(X86XMMREG, Dst);
11166 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11167 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11168 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11169 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11170 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11171 IEM_MC_PREPARE_SSE_USAGE();
11172 IEM_MC_REF_MXCSR(pfMxcsr);
11173 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11174 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11175 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11176 IEM_MC_IF_MXCSR_XCPT_PENDING()
11177 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11178 IEM_MC_ELSE()
11179 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11180 IEM_MC_ENDIF();
11181
11182 IEM_MC_ADVANCE_RIP();
11183 IEM_MC_END();
11184 }
11185 else
11186 {
11187 /*
11188 * Register, memory.
11189 */
11190 IEM_MC_BEGIN(4, 3);
11191 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11192 IEM_MC_LOCAL(X86XMMREG, Dst);
11193 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11194 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11195 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11197
11198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11199 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11200 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11202 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11203 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11204
11205 IEM_MC_PREPARE_SSE_USAGE();
11206 IEM_MC_REF_MXCSR(pfMxcsr);
11207 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11208 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11209 IEM_MC_IF_MXCSR_XCPT_PENDING()
11210 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11211 IEM_MC_ELSE()
11212 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11213 IEM_MC_ENDIF();
11214
11215 IEM_MC_ADVANCE_RIP();
11216 IEM_MC_END();
11217 }
11218 return VINF_SUCCESS;
11219}
11220
11221
11222/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11223FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11224{
11225 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11226
11227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11228 if (IEM_IS_MODRM_REG_MODE(bRm))
11229 {
11230 /*
11231 * Register, register.
11232 */
11233 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11235 IEM_MC_BEGIN(4, 2);
11236 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11237 IEM_MC_LOCAL(X86XMMREG, Dst);
11238 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11239 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11240 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11241 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11242 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11243 IEM_MC_PREPARE_SSE_USAGE();
11244 IEM_MC_REF_MXCSR(pfMxcsr);
11245 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11246 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11247 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11248 IEM_MC_IF_MXCSR_XCPT_PENDING()
11249 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11250 IEM_MC_ELSE()
11251 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11252 IEM_MC_ENDIF();
11253
11254 IEM_MC_ADVANCE_RIP();
11255 IEM_MC_END();
11256 }
11257 else
11258 {
11259 /*
11260 * Register, memory.
11261 */
11262 IEM_MC_BEGIN(4, 3);
11263 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11264 IEM_MC_LOCAL(X86XMMREG, Dst);
11265 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11266 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11267 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11268 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11269
11270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11271 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11272 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11274 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11275 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11276
11277 IEM_MC_PREPARE_SSE_USAGE();
11278 IEM_MC_REF_MXCSR(pfMxcsr);
11279 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11280 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11281 IEM_MC_IF_MXCSR_XCPT_PENDING()
11282 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11283 IEM_MC_ELSE()
11284 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11285 IEM_MC_ENDIF();
11286
11287 IEM_MC_ADVANCE_RIP();
11288 IEM_MC_END();
11289 }
11290 return VINF_SUCCESS;
11291}
11292
11293
11294/** Opcode 0x0f 0xc3. */
11295FNIEMOP_DEF(iemOp_movnti_My_Gy)
11296{
11297 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
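    /* The non-temporal hint is irrelevant when emulating; a plain store is done below. */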
11298
11299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11300
11301 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11302 if (IEM_IS_MODRM_MEM_MODE(bRm))
11303 {
11304 switch (pVCpu->iem.s.enmEffOpSize)
11305 {
11306 case IEMMODE_32BIT:
11307 IEM_MC_BEGIN(0, 2);
11308 IEM_MC_LOCAL(uint32_t, u32Value);
11309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11310
11311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11313 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11314 return IEMOP_RAISE_INVALID_OPCODE();
11315
11316 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11317 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11318 IEM_MC_ADVANCE_RIP();
11319 IEM_MC_END();
11320 break;
11321
11322 case IEMMODE_64BIT:
11323 IEM_MC_BEGIN(0, 2);
11324 IEM_MC_LOCAL(uint64_t, u64Value);
11325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11326
11327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11329 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11330 return IEMOP_RAISE_INVALID_OPCODE();
11331
11332 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11333 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11334 IEM_MC_ADVANCE_RIP();
11335 IEM_MC_END();
11336 break;
11337
11338 case IEMMODE_16BIT:
11339 /** @todo check this form. */
11340 return IEMOP_RAISE_INVALID_OPCODE();
11341 }
11342 }
11343 else
11344 return IEMOP_RAISE_INVALID_OPCODE();
11345 return VINF_SUCCESS;
11346}
11347
11348
11349/* Opcode 0x66 0x0f 0xc3 - invalid */
11350/* Opcode 0xf3 0x0f 0xc3 - invalid */
11351/* Opcode 0xf2 0x0f 0xc3 - invalid */
11352
11353
11354/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11355FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11356{
11357 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
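    /* Only imm8 bits 1:0 matter here: they select which of the four words in the MMX destination is replaced. */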
11358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11359 if (IEM_IS_MODRM_REG_MODE(bRm))
11360 {
11361 /*
11362 * Register, register.
11363 */
11364 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11366 IEM_MC_BEGIN(3, 0);
11367 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11368 IEM_MC_ARG(uint16_t, u16Src, 1);
11369 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11370 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11371 IEM_MC_PREPARE_FPU_USAGE();
11372 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11373 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11374 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11375 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11376 IEM_MC_FPU_TO_MMX_MODE();
11377 IEM_MC_ADVANCE_RIP();
11378 IEM_MC_END();
11379 }
11380 else
11381 {
11382 /*
11383 * Register, memory.
11384 */
11385 IEM_MC_BEGIN(3, 2);
11386 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11387 IEM_MC_ARG(uint16_t, u16Src, 1);
11388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11389
11390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11391 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11392 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11394 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11395 IEM_MC_PREPARE_FPU_USAGE();
11396
11397 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11398 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11399 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11400 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11401 IEM_MC_FPU_TO_MMX_MODE();
11402 IEM_MC_ADVANCE_RIP();
11403 IEM_MC_END();
11404 }
11405 return VINF_SUCCESS;
11406}
11407
11408
11409/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11410FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11411{
11412 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
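    /* The XMM form uses imm8 bits 2:0 to select one of the eight destination words. */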
11413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11414 if (IEM_IS_MODRM_REG_MODE(bRm))
11415 {
11416 /*
11417 * Register, register.
11418 */
11419 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11421 IEM_MC_BEGIN(3, 0);
11422 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11423 IEM_MC_ARG(uint16_t, u16Src, 1);
11424 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11425 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11426 IEM_MC_PREPARE_SSE_USAGE();
11427 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11428 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11429 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11430 IEM_MC_ADVANCE_RIP();
11431 IEM_MC_END();
11432 }
11433 else
11434 {
11435 /*
11436 * Register, memory.
11437 */
11438 IEM_MC_BEGIN(3, 2);
11439 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11440 IEM_MC_ARG(uint16_t, u16Src, 1);
11441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11442
11443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11444 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11445 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11447 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11448 IEM_MC_PREPARE_SSE_USAGE();
11449
11450 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11451 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11452 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11453 IEM_MC_ADVANCE_RIP();
11454 IEM_MC_END();
11455 }
11456 return VINF_SUCCESS;
11457}
11458
11459
11460/* Opcode 0xf3 0x0f 0xc4 - invalid */
11461/* Opcode 0xf2 0x0f 0xc4 - invalid */
11462
11463
11464/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11465FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11466{
11467 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);*/ /** @todo */
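    /* imm8 bits 1:0 select the source word; it is zero-extended into the destination GPR. */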
11468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11469 if (IEM_IS_MODRM_REG_MODE(bRm))
11470 {
11471 /*
11472 * Register, register.
11473 */
11474 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11476 IEM_MC_BEGIN(3, 1);
11477 IEM_MC_LOCAL(uint16_t, u16Dst);
11478 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11479 IEM_MC_ARG(uint64_t, u64Src, 1);
11480 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11481 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11482 IEM_MC_PREPARE_FPU_USAGE();
11483 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11484 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bEvilArg);
11485 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11486 IEM_MC_FPU_TO_MMX_MODE();
11487 IEM_MC_ADVANCE_RIP();
11488 IEM_MC_END();
11489 return VINF_SUCCESS;
11490 }
11491
11492 /* No memory operand. */
11493 return IEMOP_RAISE_INVALID_OPCODE();
11494}
11495
11496
11497/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11498FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11499{
11500 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11502 if (IEM_IS_MODRM_REG_MODE(bRm))
11503 {
11504 /*
11505 * Register, register.
11506 */
11507 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11509 IEM_MC_BEGIN(3, 1);
11510 IEM_MC_LOCAL(uint16_t, u16Dst);
11511 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11512 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11513 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11514 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11515 IEM_MC_PREPARE_SSE_USAGE();
11516 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11517 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bEvilArg);
11518 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11519 IEM_MC_ADVANCE_RIP();
11520 IEM_MC_END();
11521 return VINF_SUCCESS;
11522 }
11523
11524 /* No memory operand. */
11525 return IEMOP_RAISE_INVALID_OPCODE();
11526}
11527
11528
11529/* Opcode 0xf3 0x0f 0xc5 - invalid */
11530/* Opcode 0xf2 0x0f 0xc5 - invalid */
11531
11532
11533/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
11534FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11535{
11536 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
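    /* imm8 holds four 2-bit selectors: the low two pick result dwords 0 and 1 from the destination, the high two pick result dwords 2 and 3 from the source. */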
11537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11538 if (IEM_IS_MODRM_REG_MODE(bRm))
11539 {
11540 /*
11541 * Register, register.
11542 */
11543 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11545 IEM_MC_BEGIN(3, 0);
11546 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11547 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11548 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11549 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11550 IEM_MC_PREPARE_SSE_USAGE();
11551 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11552 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11553 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11554 IEM_MC_ADVANCE_RIP();
11555 IEM_MC_END();
11556 }
11557 else
11558 {
11559 /*
11560 * Register, memory.
11561 */
11562 IEM_MC_BEGIN(3, 2);
11563 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11564 IEM_MC_LOCAL(RTUINT128U, uSrc);
11565 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11567
11568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11569 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11570 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11572 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11573 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11574
11575 IEM_MC_PREPARE_SSE_USAGE();
11576 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11577 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11578
11579 IEM_MC_ADVANCE_RIP();
11580 IEM_MC_END();
11581 }
11582 return VINF_SUCCESS;
11583}
11584
11585
11586/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
11587FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
11588{
11589 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
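    /* Only imm8 bits 1:0 are used: bit 0 selects the qword taken from the destination, bit 1 the one taken from the source. */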
11590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11591 if (IEM_IS_MODRM_REG_MODE(bRm))
11592 {
11593 /*
11594 * Register, register.
11595 */
11596 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11598 IEM_MC_BEGIN(3, 0);
11599 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11600 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11601 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11602 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11603 IEM_MC_PREPARE_SSE_USAGE();
11604 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11605 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11606 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
11607 IEM_MC_ADVANCE_RIP();
11608 IEM_MC_END();
11609 }
11610 else
11611 {
11612 /*
11613 * Register, memory.
11614 */
11615 IEM_MC_BEGIN(3, 2);
11616 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11617 IEM_MC_LOCAL(RTUINT128U, uSrc);
11618 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11620
11621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11622 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11623 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11625 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11626 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11627
11628 IEM_MC_PREPARE_SSE_USAGE();
11629 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11630 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
11631
11632 IEM_MC_ADVANCE_RIP();
11633 IEM_MC_END();
11634 }
11635 return VINF_SUCCESS;
11636}
11637
11638
11639/* Opcode 0xf3 0x0f 0xc6 - invalid */
11640/* Opcode 0xf2 0x0f 0xc6 - invalid */
11641
11642
11643/** Opcode 0x0f 0xc7 !11/1. */
11644FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
11645{
11646 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
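    /* Compares EDX:EAX with the memory qword: if equal, ZF is set and ECX:EBX
       is written to memory; otherwise ZF is cleared and the memory value is
       loaded into EDX:EAX. */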
11647
11648 IEM_MC_BEGIN(4, 3);
11649 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
11650 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
11651 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
11652 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
11653 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
11654 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
11655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11656
11657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11658 IEMOP_HLP_DONE_DECODING();
11659 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11660
11661 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
11662 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
11663 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
11664
11665 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
11666 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
11667 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
11668
11669 IEM_MC_FETCH_EFLAGS(EFlags);
11670 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11671 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
11672 else
11673 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
11674
11675 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
11676 IEM_MC_COMMIT_EFLAGS(EFlags);
11677 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
11678 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
11679 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
11680 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
11681 IEM_MC_ENDIF();
11682 IEM_MC_ADVANCE_RIP();
11683
11684 IEM_MC_END();
11685 return VINF_SUCCESS;
11686}
11687
11688
11689/** Opcode REX.W 0x0f 0xc7 !11/1. */
11690FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
11691{
11692 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
11693 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
11694 {
11695#if 0
11696 RT_NOREF(bRm);
11697 IEMOP_BITCH_ABOUT_STUB();
11698 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
11699#else
11700 IEM_MC_BEGIN(4, 3);
11701 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
11702 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
11703 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
11704 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
11705 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
11706 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
11707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11708
11709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11710 IEMOP_HLP_DONE_DECODING();
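    /* Unlike cmpxchg8b, a misaligned 16-byte operand raises #GP(0) here. */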
11711 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
11712 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11713
11714 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
11715 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
11716 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
11717
11718 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
11719 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
11720 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
11721
11722 IEM_MC_FETCH_EFLAGS(EFlags);
11723# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
11724# if defined(RT_ARCH_AMD64)
11725 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
11726# endif
11727 {
11728 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11729 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11730 else
11731 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11732 }
11733# if defined(RT_ARCH_AMD64)
11734 else
11735# endif
11736# endif
11737# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
11738 {
11739 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
11740 accesses and is not atomic as a whole, which works fine in a UNI CPU guest
11741 configuration (ignoring DMA). If guest SMP is active we have no choice
11742 but to use a rendezvous callback here. Sigh. */
11743 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
11744 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11745 else
11746 {
11747 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11748 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
11749 }
11750 }
11751# endif
11752
11753 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
11754 IEM_MC_COMMIT_EFLAGS(EFlags);
11755 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
11756 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
11757 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
11758 IEM_MC_ENDIF();
11759 IEM_MC_ADVANCE_RIP();
11760
11761 IEM_MC_END();
11762 return VINF_SUCCESS;
11763#endif
11764 }
11765 Log(("cmpxchg16b -> #UD\n"));
11766 return IEMOP_RAISE_INVALID_OPCODE();
11767}
11768
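/** Opcode 0x0f 0xc7 !11/1 - cmpxchg8b/cmpxchg16b, selected by REX.W. */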
11769FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
11770{
11771 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
11772 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
11773 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
11774}
11775
11776/** Opcode 0x0f 0xc7 11/6. */
11777FNIEMOP_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
11778
11779/** Opcode 0x0f 0xc7 !11/6. */
11780#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11781FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
11782{
11783 IEMOP_MNEMONIC(vmptrld, "vmptrld");
11784 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
11785 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
11786 IEM_MC_BEGIN(2, 0);
11787 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11788 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
11789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11790 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
11791 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11792 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
11793 IEM_MC_END();
11794 return VINF_SUCCESS;
11795}
11796#else
11797FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
11798#endif
11799
11800/** Opcode 0x66 0x0f 0xc7 !11/6. */
11801#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11802FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
11803{
11804 IEMOP_MNEMONIC(vmclear, "vmclear");
11805 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
11806 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
11807 IEM_MC_BEGIN(2, 0);
11808 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11809 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
11810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11811 IEMOP_HLP_DONE_DECODING();
11812 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11813 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
11814 IEM_MC_END();
11815 return VINF_SUCCESS;
11816}
11817#else
11818FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
11819#endif
11820
11821/** Opcode 0xf3 0x0f 0xc7 !11/6. */
11822#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11823FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
11824{
11825 IEMOP_MNEMONIC(vmxon, "vmxon");
11826 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
11827 IEM_MC_BEGIN(2, 0);
11828 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11829 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
11830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11831 IEMOP_HLP_DONE_DECODING();
11832 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11833 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
11834 IEM_MC_END();
11835 return VINF_SUCCESS;
11836}
11837#else
11838FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
11839#endif
11840
11841/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
11842#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11843FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
11844{
11845 IEMOP_MNEMONIC(vmptrst, "vmptrst");
11846 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
11847 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
11848 IEM_MC_BEGIN(2, 0);
11849 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11850 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
11851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11852 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
11853 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11854 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
11855 IEM_MC_END();
11856 return VINF_SUCCESS;
11857}
11858#else
11859FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
11860#endif
11861
11862/** Opcode 0x0f 0xc7 11/7. */
11863FNIEMOP_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
11864
11865
11866/**
11867 * Group 9 jump table for register variant.
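 *
 * The tables are indexed by ModR/M.reg * 4 + the operand prefix index
 * (none, 066h, 0f3h, 0f2h), matching the lookup done in iemOp_Grp9.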
11868 */
11869IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
11870{ /* pfx: none, 066h, 0f3h, 0f2h */
11871 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
11872 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
11873 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
11874 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
11875 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
11876 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
11877 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11878 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11879};
11880AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
11881
11882
11883/**
11884 * Group 9 jump table for memory variant.
11885 */
11886IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
11887{ /* pfx: none, 066h, 0f3h, 0f2h */
11888 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
11889 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
11890 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
11891 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
11892 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
11893 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
11894 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
11895 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11896};
11897AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
11898
11899
11900/** Opcode 0x0f 0xc7. */
11901FNIEMOP_DEF(iemOp_Grp9)
11902{
11903 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
11904 if (IEM_IS_MODRM_REG_MODE(bRm))
11905 /* register, register */
11906 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
11907 + pVCpu->iem.s.idxPrefix], bRm);
11908 /* memory, register */
11909 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
11910 + pVCpu->iem.s.idxPrefix], bRm);
11911}
11912
11913
11914/**
11915 * Common 'bswap register' helper.
11916 */
11917FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
11918{
11919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11920 switch (pVCpu->iem.s.enmEffOpSize)
11921 {
11922 case IEMMODE_16BIT:
11923 IEM_MC_BEGIN(1, 0);
11924 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11925 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
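    /* Note! The result of a 16-bit bswap is architecturally undefined. */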
11926 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
11927 IEM_MC_ADVANCE_RIP();
11928 IEM_MC_END();
11929 return VINF_SUCCESS;
11930
11931 case IEMMODE_32BIT:
11932 IEM_MC_BEGIN(1, 0);
11933 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11934 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
11935 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11936 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
11937 IEM_MC_ADVANCE_RIP();
11938 IEM_MC_END();
11939 return VINF_SUCCESS;
11940
11941 case IEMMODE_64BIT:
11942 IEM_MC_BEGIN(1, 0);
11943 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11944 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
11945 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
11946 IEM_MC_ADVANCE_RIP();
11947 IEM_MC_END();
11948 return VINF_SUCCESS;
11949
11950 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11951 }
11952}
11953
11954
11955/** Opcode 0x0f 0xc8. */
11956FNIEMOP_DEF(iemOp_bswap_rAX_r8)
11957{
11958 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
11959 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
11960 prefix, but REX.B appears to be the correct one. For a parallel
11961 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
11962 IEMOP_HLP_MIN_486();
11963 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
11964}
11965
11966
11967/** Opcode 0x0f 0xc9. */
11968FNIEMOP_DEF(iemOp_bswap_rCX_r9)
11969{
11970 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
11971 IEMOP_HLP_MIN_486();
11972 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
11973}
11974
11975
11976/** Opcode 0x0f 0xca. */
11977FNIEMOP_DEF(iemOp_bswap_rDX_r10)
11978{
11979 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
11980 IEMOP_HLP_MIN_486();
11981 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
11982}
11983
11984
11985/** Opcode 0x0f 0xcb. */
11986FNIEMOP_DEF(iemOp_bswap_rBX_r11)
11987{
11988 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
11989 IEMOP_HLP_MIN_486();
11990 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
11991}
11992
11993
11994/** Opcode 0x0f 0xcc. */
11995FNIEMOP_DEF(iemOp_bswap_rSP_r12)
11996{
11997 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
11998 IEMOP_HLP_MIN_486();
11999 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12000}
12001
12002
12003/** Opcode 0x0f 0xcd. */
12004FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12005{
12006 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12007 IEMOP_HLP_MIN_486();
12008 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12009}
12010
12011
12012/** Opcode 0x0f 0xce. */
12013FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12014{
12015 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12016 IEMOP_HLP_MIN_486();
12017 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12018}
12019
12020
12021/** Opcode 0x0f 0xcf. */
12022FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12023{
12024 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12025 IEMOP_HLP_MIN_486();
12026 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12027}
12028
12029
12030/* Opcode 0x0f 0xd0 - invalid */
12031
12032
12033/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12034FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12035{
12036 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
12037 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12038}
12039
12040
12041/* Opcode 0xf3 0x0f 0xd0 - invalid */
12042
12043
12044/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12045FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12046{
12047 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
12048 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12049}
12050
12051
12052
12053/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12054FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12055{
12056 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12057 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12058}
12059
12060/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12061FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12062{
12063 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12064 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12065}
12066
12067/* Opcode 0xf3 0x0f 0xd1 - invalid */
12068/* Opcode 0xf2 0x0f 0xd1 - invalid */
12069
12070/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12071FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12072{
12073 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12074 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12075}
12076
12077
12078/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12079FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12080{
12081 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12082 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12083}
12084
12085
12086/* Opcode 0xf3 0x0f 0xd2 - invalid */
12087/* Opcode 0xf2 0x0f 0xd2 - invalid */
12088
12089/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12090FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12091{
12092 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12093 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12094}
12095
12096
12097/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12098FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12099{
12100 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12101 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12102}
12103
12104
12105/* Opcode 0xf3 0x0f 0xd3 - invalid */
12106/* Opcode 0xf2 0x0f 0xd3 - invalid */
12107
12108
12109/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12110FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12111{
12112 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12113 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12114}
12115
12116
12117/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12118FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12119{
12120 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12121 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12122}
12123
12124
12125/* Opcode 0xf3 0x0f 0xd4 - invalid */
12126/* Opcode 0xf2 0x0f 0xd4 - invalid */
12127
12128/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12129FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12130{
12131 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12132 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12133}
12134
12135/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12136FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12137{
12138 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12139 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12140}
12141
12142
12143/* Opcode 0xf3 0x0f 0xd5 - invalid */
12144/* Opcode 0xf2 0x0f 0xd5 - invalid */
12145
12146/* Opcode 0x0f 0xd6 - invalid */
12147
12148/**
12149 * @opcode 0xd6
12150 * @oppfx 0x66
12151 * @opcpuid sse2
12152 * @opgroup og_sse2_pcksclr_datamove
12153 * @opxcpttype none
12154 * @optest op1=-1 op2=2 -> op1=2
12155 * @optest op1=0 op2=-42 -> op1=-42
12156 */
12157FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12158{
12159 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12161 if (IEM_IS_MODRM_REG_MODE(bRm))
12162 {
12163 /*
12164 * Register, register.
12165 */
12166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12167 IEM_MC_BEGIN(0, 2);
12168 IEM_MC_LOCAL(uint64_t, uSrc);
12169
12170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12172
12173 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12174 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12175
12176 IEM_MC_ADVANCE_RIP();
12177 IEM_MC_END();
12178 }
12179 else
12180 {
12181 /*
12182 * Memory, register.
12183 */
12184 IEM_MC_BEGIN(0, 2);
12185 IEM_MC_LOCAL(uint64_t, uSrc);
12186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12187
12188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12190 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12191 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12192
12193 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12194 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12195
12196 IEM_MC_ADVANCE_RIP();
12197 IEM_MC_END();
12198 }
12199 return VINF_SUCCESS;
12200}
12201
12202
12203/**
12204 * @opcode 0xd6
12205 * @opcodesub 11 mr/reg
12206 * @oppfx f3
12207 * @opcpuid sse2
12208 * @opgroup og_sse2_simdint_datamove
12209 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12210 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12211 */
12212FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12213{
12214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12215 if (IEM_IS_MODRM_REG_MODE(bRm))
12216 {
12217 /*
12218 * Register, register.
12219 */
12220 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12222 IEM_MC_BEGIN(0, 1);
12223 IEM_MC_LOCAL(uint64_t, uSrc);
12224
12225 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12226 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12227 IEM_MC_FPU_TO_MMX_MODE();
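    /* Entering MMX mode tags all FPU registers as valid (FTW=0xff), which is why the tests above expect ftw=0xff. */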
12228
12229 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12230 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12231
12232 IEM_MC_ADVANCE_RIP();
12233 IEM_MC_END();
12234 return VINF_SUCCESS;
12235 }
12236
12237 /**
12238 * @opdone
12239 * @opmnemonic udf30fd6mem
12240 * @opcode 0xd6
12241 * @opcodesub !11 mr/reg
12242 * @oppfx f3
12243 * @opunused intel-modrm
12244 * @opcpuid sse
12245 * @optest ->
12246 */
12247 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12248}
12249
12250
12251/**
12252 * @opcode 0xd6
12253 * @opcodesub 11 mr/reg
12254 * @oppfx f2
12255 * @opcpuid sse2
12256 * @opgroup og_sse2_simdint_datamove
12257 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12258 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12259 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12260 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12261 * @optest op1=-42 op2=0xfedcba9876543210
12262 * -> op1=0xfedcba9876543210 ftw=0xff
12263 */
12264FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12265{
12266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12267 if (IEM_IS_MODRM_REG_MODE(bRm))
12268 {
12269 /*
12270 * Register, register.
12271 */
12272 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12274 IEM_MC_BEGIN(0, 1);
12275 IEM_MC_LOCAL(uint64_t, uSrc);
12276
12277 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12278 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12279 IEM_MC_FPU_TO_MMX_MODE();
12280
12281 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12282 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12283
12284 IEM_MC_ADVANCE_RIP();
12285 IEM_MC_END();
12286 return VINF_SUCCESS;
12287 }
12288
12289 /**
12290 * @opdone
12291 * @opmnemonic udf20fd6mem
12292 * @opcode 0xd6
12293 * @opcodesub !11 mr/reg
12294 * @oppfx f2
12295 * @opunused intel-modrm
12296 * @opcpuid sse
12297 * @optest ->
12298 */
12299 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12300}
12301
12302
12303/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12304FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12305{
12306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12307 /* Docs say register only. */
12308 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12309 {
12310 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12311 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
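    /* Collects the most significant bit of each of the eight source bytes into the low bits of the destination GPR, clearing the rest. */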
12312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12313 IEM_MC_BEGIN(2, 0);
12314 IEM_MC_ARG(uint64_t *, puDst, 0);
12315 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12316 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12317 IEM_MC_PREPARE_FPU_USAGE();
12318 IEM_MC_FPU_TO_MMX_MODE();
12319
12320 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12321 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12322 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12323
12324 IEM_MC_ADVANCE_RIP();
12325 IEM_MC_END();
12326 return VINF_SUCCESS;
12327 }
12328 return IEMOP_RAISE_INVALID_OPCODE();
12329}
12330
12331
12332/** Opcode 0x66 0x0f 0xd7 - */
12333FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12334{
12335 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12336 /* Docs say register only. */
12337 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12338 {
12339 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12340 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
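    /* Like the MMX form above, but with 16 source bytes and thus 16 mask bits. */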
12341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12342 IEM_MC_BEGIN(2, 0);
12343 IEM_MC_ARG(uint64_t *, puDst, 0);
12344 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12345 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12346 IEM_MC_PREPARE_SSE_USAGE();
12347 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12348 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12349 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12350 IEM_MC_ADVANCE_RIP();
12351 IEM_MC_END();
12352 return VINF_SUCCESS;
12353 }
12354 return IEMOP_RAISE_INVALID_OPCODE();
12355}
12356
12357
12358/* Opcode 0xf3 0x0f 0xd7 - invalid */
12359/* Opcode 0xf2 0x0f 0xd7 - invalid */
12360
12361
12362/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12363FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12364{
12365 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12366 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12367}
12368
12369
12370/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12371FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12372{
12373 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12374 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12375}
12376
12377
12378/* Opcode 0xf3 0x0f 0xd8 - invalid */
12379/* Opcode 0xf2 0x0f 0xd8 - invalid */
12380
12381/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12382FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12383{
12384 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12385 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12386}
12387
12388
12389/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12390FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12391{
12392 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12393 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12394}
12395
12396
12397/* Opcode 0xf3 0x0f 0xd9 - invalid */
12398/* Opcode 0xf2 0x0f 0xd9 - invalid */
12399
12400/** Opcode 0x0f 0xda - pminub Pq, Qq */
12401FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12402{
12403 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12404 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12405}
12406
12407
12408/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12409FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12410{
12411 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12412 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12413}
12414
12415/* Opcode 0xf3 0x0f 0xda - invalid */
12416/* Opcode 0xf2 0x0f 0xda - invalid */
12417
12418/** Opcode 0x0f 0xdb - pand Pq, Qq */
12419FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12420{
12421 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12422 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12423}
12424
12425
12426/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12427FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12428{
12429 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12430 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12431}
12432
12433
12434/* Opcode 0xf3 0x0f 0xdb - invalid */
12435/* Opcode 0xf2 0x0f 0xdb - invalid */
12436
12437/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12438FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12439{
12440 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12441 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12442}
12443
12444
12445/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12446FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12447{
12448 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12449 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12450}
12451
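/*
 * Illustrative only: the matching unsigned saturating addition used by the
 * paddusb/paddusw workers - sums above the element maximum clamp to it.
 * Hypothetical helper, byte variant shown.
 */
#if 0
static uint8_t iemRefAddUsU8(uint8_t uDst, uint8_t uSrc)
{
    unsigned const uSum = (unsigned)uDst + uSrc;
    return uSum > 0xff ? (uint8_t)0xff : (uint8_t)uSum; /* saturate at 255 */
}
#endif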
12452
12453/* Opcode 0xf3 0x0f 0xdc - invalid */
12454/* Opcode 0xf2 0x0f 0xdc - invalid */
12455
12456/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12457FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12458{
12459 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12460 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12461}
12462
12463
12464/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
12465FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
12466{
12467 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12468 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
12469}
12470
12471
12472/* Opcode 0xf3 0x0f 0xdd - invalid */
12473/* Opcode 0xf2 0x0f 0xdd - invalid */
12474
12475/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
12476FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
12477{
12478 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12479 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
12480}
12481
12482
12483/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
12484FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
12485{
12486 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12487 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
12488}
12489
12490/* Opcode 0xf3 0x0f 0xde - invalid */
12491/* Opcode 0xf2 0x0f 0xde - invalid */
12492
12493
12494/** Opcode 0x0f 0xdf - pandn Pq, Qq */
12495FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
12496{
12497 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12498 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
12499}
12500
12501
12502/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
12503FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
12504{
12505 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12506 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
12507}
12508
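/*
 * Illustrative only: pandn complements the *destination* operand, not the
 * source, which is the detail most easily gotten wrong.  Hypothetical
 * helper showing the 64-bit case.
 */
#if 0
static uint64_t iemRefPandnU64(uint64_t uDst, uint64_t uSrc)
{
    return ~uDst & uSrc;
}
#endif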
12509
12510/* Opcode 0xf3 0x0f 0xdf - invalid */
12511/* Opcode 0xf2 0x0f 0xdf - invalid */
12512
12513/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
12514FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
12515{
12516 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12517 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
12518}
12519
12520
12521/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
12522FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
12523{
12524 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12525 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
12526}
12527
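/*
 * Illustrative only: the pavgb/pavgw workers compute a rounding average,
 * i.e. (a + b + 1) / 2 in a widened type so the carry is not lost.
 * Hypothetical helper, byte variant shown.
 */
#if 0
static uint8_t iemRefAvgU8(uint8_t uDst, uint8_t uSrc)
{
    return (uint8_t)(((unsigned)uDst + uSrc + 1) >> 1); /* halfway cases round up */
}
#endif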
12528
12529/* Opcode 0xf3 0x0f 0xe0 - invalid */
12530/* Opcode 0xf2 0x0f 0xe0 - invalid */
12531
12532/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
12533FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
12534{
12535 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12536 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
12537}
12538
12539
12540/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
12541FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
12542{
12543 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12544 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
12545}
12546
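/*
 * Illustrative only: the arithmetic right shifts saturate the shift count -
 * a count of 16 or more fills every word with its sign bit rather than
 * producing zero.  Hypothetical helper; assumes '>>' on signed values is
 * arithmetic, as it is on the compilers VirtualBox targets.
 */
#if 0
static int16_t iemRefPsrawW(int16_t iValue, uint64_t cShift)
{
    return (int16_t)(iValue >> (cShift > 15 ? 15 : cShift));
}
#endif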
12547
12548/* Opcode 0xf3 0x0f 0xe1 - invalid */
12549/* Opcode 0xf2 0x0f 0xe1 - invalid */
12550
12551/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
12552FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
12553{
12554 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12555 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
12556}
12557
12558
12559/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
12560FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
12561{
12562 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12563 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
12564}
12565
12566
12567/* Opcode 0xf3 0x0f 0xe2 - invalid */
12568/* Opcode 0xf2 0x0f 0xe2 - invalid */
12569
12570/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
12571FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
12572{
12573 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12574 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
12575}
12576
12577
12578/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
12579FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
12580{
12581 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12582 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
12583}
12584
12585
12586/* Opcode 0xf3 0x0f 0xe3 - invalid */
12587/* Opcode 0xf2 0x0f 0xe3 - invalid */
12588
12589/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
12590FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
12591{
12592 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12593 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
12594}
12595
12596
12597/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
12598FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
12599{
12600 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12601 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
12602}
12603
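/*
 * Illustrative only: pmulhuw keeps bits 16..31 of the widened 16x16 product;
 * pmulhw (0xe5 below) is the same idea with signed inputs.  Hypothetical
 * helper.
 */
#if 0
static uint16_t iemRefMulHiU16(uint16_t uDst, uint16_t uSrc)
{
    return (uint16_t)(((uint32_t)uDst * uSrc) >> 16);
}
#endif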
12604
12605/* Opcode 0xf3 0x0f 0xe4 - invalid */
12606/* Opcode 0xf2 0x0f 0xe4 - invalid */
12607
12608/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
12609FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
12610{
12611 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12612 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
12613}
12614
12615
12616/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
12617FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
12618{
12619 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12620 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
12621}
12622
12623
12624/* Opcode 0xf3 0x0f 0xe5 - invalid */
12625/* Opcode 0xf2 0x0f 0xe5 - invalid */
12626/* Opcode 0x0f 0xe6 - invalid */
12627
12628
12629/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
12630FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
12631{
12632 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12633 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
12634}
12635
12636
12637/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
12638FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
12639{
12640 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12641 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
12642}
12643
12644
12645/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
12646FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
12647{
12648 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12649 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
12650}
12651
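/*
 * Illustrative only: the difference between the cvtt* and cvt* forms above
 * is the rounding step.  Scalar sketch with hypothetical helpers; nearbyint
 * (C99 <math.h>) honours the current rounding mode much like MXCSR.RC does,
 * and the out-of-range handling (integer indefinite, 0x80000000) that the
 * real workers must do is omitted here.
 */
#if 0
static int32_t iemRefCvttSd2Si(double rd) { return (int32_t)rd;            /* truncate toward zero */ }
static int32_t iemRefCvtSd2Si(double rd)  { return (int32_t)nearbyint(rd); /* round per current mode */ }
#endif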
12652
12653/**
12654 * @opcode 0xe7
12655 * @opcodesub !11 mr/reg
12656 * @oppfx none
12657 * @opcpuid sse
12658 * @opgroup og_sse1_cachect
12659 * @opxcpttype none
12660 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
12661 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12662 */
12663FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
12664{
12665 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12667 if (IEM_IS_MODRM_MEM_MODE(bRm))
12668 {
12669 /* Register, memory. */
12670 IEM_MC_BEGIN(0, 2);
12671 IEM_MC_LOCAL(uint64_t, uSrc);
12672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12673
12674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12676 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12677 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12678 IEM_MC_FPU_TO_MMX_MODE();
12679
12680 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
12681 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12682
12683 IEM_MC_ADVANCE_RIP();
12684 IEM_MC_END();
12685 return VINF_SUCCESS;
12686 }
12687 /**
12688 * @opdone
12689 * @opmnemonic ud0fe7reg
12690 * @opcode 0xe7
12691 * @opcodesub 11 mr/reg
12692 * @oppfx none
12693 * @opunused immediate
12694 * @opcpuid sse
12695 * @optest ->
12696 */
12697 return IEMOP_RAISE_INVALID_OPCODE();
12698}
12699
12700/**
12701 * @opcode 0xe7
12702 * @opcodesub !11 mr/reg
12703 * @oppfx 0x66
12704 * @opcpuid sse2
12705 * @opgroup og_sse2_cachect
12706 * @opxcpttype 1
12707 * @optest op1=-1 op2=2 -> op1=2
12708 * @optest op1=0 op2=-42 -> op1=-42
12709 */
12710FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
12711{
12712 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12714 if (IEM_IS_MODRM_MEM_MODE(bRm))
12715 {
12716 /* Register, memory. */
12717 IEM_MC_BEGIN(0, 2);
12718 IEM_MC_LOCAL(RTUINT128U, uSrc);
12719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12720
12721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12723 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12724 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12725
12726 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12727 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12728
12729 IEM_MC_ADVANCE_RIP();
12730 IEM_MC_END();
12731 return VINF_SUCCESS;
12732 }
12733
12734 /**
12735 * @opdone
12736 * @opmnemonic ud660fe7reg
12737 * @opcode 0xe7
12738 * @opcodesub 11 mr/reg
12739 * @oppfx 0x66
12740 * @opunused immediate
12741 * @opcpuid sse
12742 * @optest ->
12743 */
12744 return IEMOP_RAISE_INVALID_OPCODE();
12745}
12746
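/*
 * Illustrative only: how guest code typically uses movntdq - streaming
 * stores that bypass the cache, fenced before the data is consumed.  The
 * destination must be 16 byte aligned (hence the _ALIGN_SSE store above).
 * Hypothetical helper using the standard intrinsics.
 */
#if 0
# include <emmintrin.h>
static void iemRefStreamCopy(__m128i *paDst, const __m128i *paSrc, size_t cVecs)
{
    for (size_t i = 0; i < cVecs; i++)
        _mm_stream_si128(&paDst[i], paSrc[i]);  /* movntdq */
    _mm_sfence();                               /* order the WC stores */
}
#endif
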
12747/* Opcode 0xf3 0x0f 0xe7 - invalid */
12748/* Opcode 0xf2 0x0f 0xe7 - invalid */
12749
12750
12751/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
12752FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
12753{
12754 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12755 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
12756}
12757
12758
12759/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
12760FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
12761{
12762 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12763 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
12764}
12765
12766
12767/* Opcode 0xf3 0x0f 0xe8 - invalid */
12768/* Opcode 0xf2 0x0f 0xe8 - invalid */
12769
12770/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
12771FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
12772{
12773 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12774 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
12775}
12776
12777
12778/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
12779FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
12780{
12781 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12782 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
12783}
12784
12785
12786/* Opcode 0xf3 0x0f 0xe9 - invalid */
12787/* Opcode 0xf2 0x0f 0xe9 - invalid */
12788
12789
12790/** Opcode 0x0f 0xea - pminsw Pq, Qq */
12791FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
12792{
12793 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12794 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
12795}
12796
12797
12798/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
12799FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
12800{
12801 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12802 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
12803}
12804
12805
12806/* Opcode 0xf3 0x0f 0xea - invalid */
12807/* Opcode 0xf2 0x0f 0xea - invalid */
12808
12809
12810/** Opcode 0x0f 0xeb - por Pq, Qq */
12811FNIEMOP_DEF(iemOp_por_Pq_Qq)
12812{
12813 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12814 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
12815}
12816
12817
12818/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
12819FNIEMOP_DEF(iemOp_por_Vx_Wx)
12820{
12821 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12822 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
12823}
12824
12825
12826/* Opcode 0xf3 0x0f 0xeb - invalid */
12827/* Opcode 0xf2 0x0f 0xeb - invalid */
12828
12829/** Opcode 0x0f 0xec - paddsb Pq, Qq */
12830FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
12831{
12832 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12833 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
12834}
12835
12836
12837/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
12838FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
12839{
12840 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12841 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
12842}
12843
12844
12845/* Opcode 0xf3 0x0f 0xec - invalid */
12846/* Opcode 0xf2 0x0f 0xec - invalid */
12847
12848/** Opcode 0x0f 0xed - paddsw Pq, Qq */
12849FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
12850{
12851 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12852 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
12853}
12854
12855
12856/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
12857FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
12858{
12859 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12860 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
12861}
12862
12863
12864/* Opcode 0xf3 0x0f 0xed - invalid */
12865/* Opcode 0xf2 0x0f 0xed - invalid */
12866
12867
12868/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
12869FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
12870{
12871 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12872 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
12873}
12874
12875
12876/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
12877FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
12878{
12879 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12880 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
12881}
12882
12883
12884/* Opcode 0xf3 0x0f 0xee - invalid */
12885/* Opcode 0xf2 0x0f 0xee - invalid */
12886
12887
12888/** Opcode 0x0f 0xef - pxor Pq, Qq */
12889FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
12890{
12891 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12892 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
12893}
12894
12895
12896/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
12897FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
12898{
12899 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12900 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
12901}
12902
12903
12904/* Opcode 0xf3 0x0f 0xef - invalid */
12905/* Opcode 0xf2 0x0f 0xef - invalid */
12906
12907/* Opcode 0x0f 0xf0 - invalid */
12908/* Opcode 0x66 0x0f 0xf0 - invalid */
12909
12910
12911/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
12912FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
12913{
12914 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12916 if (IEM_IS_MODRM_REG_MODE(bRm))
12917 {
12918 /*
12919 * Register, register - (not implemented, assuming it raises \#UD).
12920 */
12921 return IEMOP_RAISE_INVALID_OPCODE();
12922 }
12923 else
12924 {
12925 /*
12926 * Register, memory.
12927 */
12928 IEM_MC_BEGIN(0, 2);
12929 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
12930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12931
12932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12934 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
12935 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12936 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12937 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
12938
12939 IEM_MC_ADVANCE_RIP();
12940 IEM_MC_END();
12941 }
12942 return VINF_SUCCESS;
12943}
12944
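/*
 * Illustrative only: lddqu is simply an alignment-tolerant 128-bit load
 * (SSE3), which is why the emulation above uses a plain IEM_MC_FETCH_MEM_U128
 * rather than an aligned one.  Hypothetical helper using the standard
 * intrinsic.
 */
#if 0
# include <pmmintrin.h>
static __m128i iemRefLddqu(const void *pvSrc)
{
    return _mm_lddqu_si128((const __m128i *)pvSrc);
}
#endif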
12945
12946/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
12947FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
12948{
12949 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12950 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
12951}
12952
12953
12954/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
12955FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
12956{
12957 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12958 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
12959}
12960
12961
12962/* Opcode 0xf2 0x0f 0xf1 - invalid */
12963
12964/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
12965FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
12966{
12967 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12968 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
12969}
12970
12971
12972/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
12973FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
12974{
12975 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12976 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
12977}
12978
12979
12980/* Opcode 0xf2 0x0f 0xf2 - invalid */
12981
12982/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
12983FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
12984{
12985 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12986 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
12987}
12988
12989
12990/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
12991FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
12992{
12993 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12994 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
12995}
12996
12997/* Opcode 0xf2 0x0f 0xf3 - invalid */
12998
12999/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13000FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13001{
13002 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13003 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13004}
13005
13006
13007/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13008FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13009{
13010 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13011 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13012}
13013
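/*
 * Illustrative only: pmuludq multiplies only the low (even) dword of each
 * qword lane, producing a full 64-bit product; the odd dwords are ignored.
 * Hypothetical helper showing one lane.
 */
#if 0
static uint64_t iemRefPmuludqLane(uint64_t uDst, uint64_t uSrc)
{
    return (uint64_t)(uint32_t)uDst * (uint32_t)uSrc;
}
#endif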
13014
13015/* Opcode 0xf2 0x0f 0xf4 - invalid */
13016
13017/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13018FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13019{
13020 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13021 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13022}
13023
13024
13025/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13026FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13027{
13028 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13029 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13030}
13031
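/*
 * Illustrative only: pmaddwd multiplies adjacent signed word pairs and sums
 * each pair into a signed dword.  Hypothetical helper showing one dword of
 * the result.
 */
#if 0
static int32_t iemRefPmaddwdPair(int16_t iDst0, int16_t iSrc0, int16_t iDst1, int16_t iSrc1)
{
    return (int32_t)iDst0 * iSrc0 + (int32_t)iDst1 * iSrc1;
}
#endif
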
13032/* Opcode 0xf2 0x0f 0xf5 - invalid */
13033
13034/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13035FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13036{
13037 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13038 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13039}
13040
13041
13042/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13043FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13044{
13045 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13046 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13047}
13048
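/*
 * Illustrative only: psadbw sums the absolute byte differences of a qword
 * into a single word (the maximum, 8 * 255, fits with room to spare); the
 * remaining result bits are zeroed.  Hypothetical helper for the 64-bit
 * case.
 */
#if 0
static uint16_t iemRefPsadbwU64(uint64_t uDst, uint64_t uSrc)
{
    unsigned uSum = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        int const iDiff = (int)((uDst >> iByte * 8) & 0xff) - (int)((uSrc >> iByte * 8) & 0xff);
        uSum += (unsigned)(iDiff < 0 ? -iDiff : iDiff);
    }
    return (uint16_t)uSum;
}
#endif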
13049
13050/* Opcode 0xf2 0x0f 0xf6 - invalid */
13051
13052/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13053FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13054/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13055FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13056/* Opcode 0xf2 0x0f 0xf7 - invalid */
13057
13058
13059/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13060FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13061{
13062 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13063 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13064}
13065
13066
13067/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13068FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13069{
13070 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13071 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13072}
13073
13074
13075/* Opcode 0xf2 0x0f 0xf8 - invalid */
13076
13077
13078/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13079FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13080{
13081 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13082 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13083}
13084
13085
13086/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13087FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13088{
13089 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13090 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13091}
13092
13093
13094/* Opcode 0xf2 0x0f 0xf9 - invalid */
13095
13096
13097/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13098FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13099{
13100 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13101 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13102}
13103
13104
13105/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13106FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13107{
13108 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13109 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13110}
13111
13112
13113/* Opcode 0xf2 0x0f 0xfa - invalid */
13114
13115
13116/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13117FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13118{
13119 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13120 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13121}
13122
13123
13124/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13125FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13126{
13127 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13128 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13129}
13130
13131
13132/* Opcode 0xf2 0x0f 0xfb - invalid */
13133
13134
13135/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13136FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13137{
13138 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13139 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13140}
13141
13142
13143/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13144FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13145{
13146 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13147 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13148}
13149
13150
13151/* Opcode 0xf2 0x0f 0xfc - invalid */
13152
13153
13154/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13155FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13156{
13157 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13158 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13159}
13160
13161
13162/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13163FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13164{
13165 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13166 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13167}
13168
13169
13170/* Opcode 0xf2 0x0f 0xfd - invalid */
13171
13172
13173/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13174FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13175{
13176 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13177 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13178}
13179
13180
13181/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
13182FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13183{
13184 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13185 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13186}
13187
13188
13189/* Opcode 0xf2 0x0f 0xfe - invalid */
13190
13191
13192/** Opcode **** 0x0f 0xff - UD0 */
13193FNIEMOP_DEF(iemOp_ud0)
13194{
13195 IEMOP_MNEMONIC(ud0, "ud0");
13196 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13197 {
13198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13199#ifndef TST_IEM_CHECK_MC
13200 if (IEM_IS_MODRM_MEM_MODE(bRm))
13201 {
13202 RTGCPTR GCPtrEff;
13203 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13204 if (rcStrict != VINF_SUCCESS)
13205 return rcStrict;
13206 }
13207#endif
13208 IEMOP_HLP_DONE_DECODING();
13209 }
13210 return IEMOP_RAISE_INVALID_OPCODE();
13211}
13212
13213
13214
13215/**
13216 * Two byte opcode map, first byte 0x0f.
13217 *
13218 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13219 * check if it needs updating as well when making changes.
13220 */
13221IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13222{
13223 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13224 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13225 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13226 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13227 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13228 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13229 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13230 /* 0x06 */ IEMOP_X4(iemOp_clts),
13231 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13232 /* 0x08 */ IEMOP_X4(iemOp_invd),
13233 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13234 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13235 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13236 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13237 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13238 /* 0x0e */ IEMOP_X4(iemOp_femms),
13239 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13240
13241 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13242 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13243 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13244 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13245 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13246 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13247 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13248 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13249 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13250 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13251 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13252 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13253 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13254 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13255 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13256 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13257
13258 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13259 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13260 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13261 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13262 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13263 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13264 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13265 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13266 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13267 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13268 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13269 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13270 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13271 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13272 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13273 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13274
13275 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13276 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13277 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13278 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13279 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13280 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13281 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13282 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13283 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13284 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13285 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13286 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13287 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13288 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13289 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13290 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13291
13292 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13293 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13294 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13295 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13296 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13297 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13298 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13299 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13300 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13301 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13302 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13303 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13304 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13305 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13306 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13307 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13308
13309 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13310 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13311 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13312 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13313 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13314 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13315 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13316 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13317 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13318 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13319 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13320 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13321 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13322 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13323 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13324 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13325
13326 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13327 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13328 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13329 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13330 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13331 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13332 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13333 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13334 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13335 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13336 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13337 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13338 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13339 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13340 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13341 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13342
13343 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13344 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13345 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13346 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13347 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13348 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13349 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13350 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13351
13352 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13353 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13354 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13355 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13356 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13357 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13358 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13359 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13360
13361 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13362 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
13363 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
13364 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
13365 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
13366 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
13367 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
13368 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
13369 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
13370 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
13371 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
13372 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
13373 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
13374 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
13375 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
13376 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
13377
13378 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
13379 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
13380 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
13381 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
13382 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
13383 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
13384 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
13385 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
13386 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
13387 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
13388 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
13389 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
13390 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
13391 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
13392 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
13393 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
13394
13395 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
13396 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
13397 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
13398 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
13399 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
13400 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
13401 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
13402 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
13403 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
13404 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
13405 /* 0xaa */ IEMOP_X4(iemOp_rsm),
13406 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
13407 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
13408 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
13409 /* 0xae */ IEMOP_X4(iemOp_Grp15),
13410 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
13411
13412 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
13413 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
13414 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
13415 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
13416 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
13417 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
13418 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
13419 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
13420 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
13421 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
13422 /* 0xba */ IEMOP_X4(iemOp_Grp8),
13423 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
13424 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
13425 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
13426 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
13427 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
13428
13429 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
13430 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
13431 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
13432 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13433 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13434 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13435 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13436 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
13437 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
13438 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
13439 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
13440 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
13441 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
13442 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
13443 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
13444 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
13445
13446 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
13447 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13448 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13449 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13450 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13451 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13452 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
13453 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13454 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13455 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13456 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13457 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13458 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13459 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13460 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13461 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13462
13463 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13464 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13465 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13466 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13467 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13468 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13469 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
13470 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13471 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13472 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13473 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13474 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13475 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13476 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13477 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13478 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13479
13480 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
13481 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13482 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13483 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13484 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13485 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13486 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13487 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13488 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13489 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13490 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13491 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13492 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13493 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13494 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13495 /* 0xff */ IEMOP_X4(iemOp_ud0),
13496};
13497AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
13498
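/*
 * Illustrative only: the table is laid out with four entries per opcode
 * byte, one per mandatory-prefix column as noted above (none, 66h, F3h,
 * F2h), so a lookup presumably boils down to something like the following
 * hypothetical sketch.
 */
#if 0
static PFNIEMOP iemRefLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix /* 0=none, 1=66h, 2=F3h, 3=F2h */)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif
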
13499/** @} */
13500