VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@96945

Last change on this file since 96945 was 96943, checked in by vboxsync, 2 years ago

VMM/IEM: Addendum to r153847 (VMM/IEM: Fetch EFLAGS before calling assembly in iemOp_ucomisd_Vsd_Wsd. bugref:9898), fix the other comis variants as well, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 475.4 KB
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96943 2022-09-30 06:32:46Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
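
/*
 * Illustrative sketch (not part of the original file): concrete opcode
 * handlers dispatch to the worker above with a pointer to the assembly
 * helper for the specific instruction, e.g. for PXOR (0x0f 0xef):
 *
 *     FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
 *     }
 *
 * The mnemonic macro arguments and flags are assumptions for illustration;
 * the FNIEMOP_CALL_1 dispatch pattern is the one used throughout this file.
 */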


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
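
/*
 * Illustrative sketch (not part of the original file): an instruction that
 * needs no FXSAVE state, such as PXOR with a 0x66 prefix (0x66 0x0f 0xef),
 * would bind to this Opt worker:
 *
 *     return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
 *
 * The worker/instruction pairing is an assumption for illustration; the
 * opcode handlers further down in this file hold the authoritative bindings.
 */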


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
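
/*
 * Illustrative sketch (not part of the original file): the low-half unpacks
 * fit this form, e.g. PUNPCKLDQ (0x0f 0x62), whose memory variant reads only
 * 32 bits (note the IEM_MC_FETCH_MEM_U32_ZX_U64 above):
 *
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
 *
 * The exact worker name is an assumption for illustration.
 */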


/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either the low 64 bits or the full
 * 128 bits may be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either the low 64 bits or the full
 * 128 bits may be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
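
/*
 * Illustrative sketch (not part of the original file): packed single-precision
 * arithmetic such as ADDPS (0x0f 0x58) fits this form; a handler would look
 * roughly like:
 *
 *     FNIEMOP_DEF(iemOp_addps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
 *     }
 *
 * The mnemonic macro arguments are assumptions for illustration; the worker
 * gets the FPU/SSE state via the IEMSSERESULT plumbing set up above.
 */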


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
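
/*
 * Illustrative sketch (not part of the original file): the scalar
 * single-precision counterpart, e.g. ADDSS (0xf3 0x0f 0x58), passes an R32
 * worker instead of a full 128-bit one:
 *
 *     return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
 *
 * The worker name is an assumption for illustration.
 */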


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
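
/*
 * Illustrative sketch (not part of the original file): the SSE3 horizontal
 * adds, e.g. HADDPS (0xf2 0x0f 0x7c), are the kind of instruction this
 * worker serves:
 *
 *     return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
 *
 * The specific binding is an assumption for illustration.
 */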


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR (0x0f 0x00 /4) and VERW (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
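
/*
 * Decode example (not part of the original file): for the byte sequence
 * 0f 00 d8 the ModR/M byte is 0xd8 = 11 011 000b, i.e. mod=3 (register
 * mode), reg=3 and rm=0. g_apfnGroup6[3] selects iemOp_Grp6_ltr, which
 * then decodes rm=0 as AX, giving 'ltr ax'.
 */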


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1818
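/* Example encodings for the mod=11 dispatch above: 0F 01 D0 is xgetbv
   (reg=2, rm=0), 0F 01 F8 is swapgs (reg=7, rm=0) and 0F 01 F9 is rdtscp
   (reg=7, rm=1); with a memory operand only the reg field matters and it
   indexes g_apfnGroup7Mem directly. */
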
1819/** Common worker for LAR (opcode 0x0f 0x02) and LSL (opcode 0x0f 0x03). */
1820FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1821{
1822 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1824
1825 if (IEM_IS_MODRM_REG_MODE(bRm))
1826 {
1827 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1828 switch (pVCpu->iem.s.enmEffOpSize)
1829 {
1830 case IEMMODE_16BIT:
1831 {
1832 IEM_MC_BEGIN(3, 0);
1833 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1834 IEM_MC_ARG(uint16_t, u16Sel, 1);
1835 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1836
1837 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1838 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1839 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1840
1841 IEM_MC_END();
1842 return VINF_SUCCESS;
1843 }
1844
1845 case IEMMODE_32BIT:
1846 case IEMMODE_64BIT:
1847 {
1848 IEM_MC_BEGIN(3, 0);
1849 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1850 IEM_MC_ARG(uint16_t, u16Sel, 1);
1851 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1852
1853 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1854 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1855 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1856
1857 IEM_MC_END();
1858 return VINF_SUCCESS;
1859 }
1860
1861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1862 }
1863 }
1864 else
1865 {
1866 switch (pVCpu->iem.s.enmEffOpSize)
1867 {
1868 case IEMMODE_16BIT:
1869 {
1870 IEM_MC_BEGIN(3, 1);
1871 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1872 IEM_MC_ARG(uint16_t, u16Sel, 1);
1873 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1875
1876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1877 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1878
1879 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1880 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1881 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1882
1883 IEM_MC_END();
1884 return VINF_SUCCESS;
1885 }
1886
1887 case IEMMODE_32BIT:
1888 case IEMMODE_64BIT:
1889 {
1890 IEM_MC_BEGIN(3, 1);
1891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1892 IEM_MC_ARG(uint16_t, u16Sel, 1);
1893 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1895
1896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1897 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1898/** @todo testcase: make sure it's a 16-bit read. */
1899
1900 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1901 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1902 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1903
1904 IEM_MC_END();
1905 return VINF_SUCCESS;
1906 }
1907
1908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1909 }
1910 }
1911}
1912
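/* Note: LAR and LSL both fetch only a 16-bit selector from the source (even
   with a 32/64-bit operand size, see the @todo above) and report success by
   setting ZF; on failure ZF is cleared and the destination stays unchanged. */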
1913
1914
1915/** Opcode 0x0f 0x02. */
1916FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1917{
1918 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1919 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1920}
1921
1922
1923/** Opcode 0x0f 0x03. */
1924FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1925{
1926 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1927 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1928}
1929
1930
1931/** Opcode 0x0f 0x05. */
1932FNIEMOP_DEF(iemOp_syscall)
1933{
1934 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1936 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1937}
1938
1939
1940/** Opcode 0x0f 0x06. */
1941FNIEMOP_DEF(iemOp_clts)
1942{
1943 IEMOP_MNEMONIC(clts, "clts");
1944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1945 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1946}
1947
1948
1949/** Opcode 0x0f 0x07. */
1950FNIEMOP_DEF(iemOp_sysret)
1951{
1952 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1954 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1955}
1956
1957
1958/** Opcode 0x0f 0x08. */
1959FNIEMOP_DEF(iemOp_invd)
1960{
1961 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1962 IEMOP_HLP_MIN_486();
1963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1964 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
1965}
1966
1967
1968/** Opcode 0x0f 0x09. */
1969FNIEMOP_DEF(iemOp_wbinvd)
1970{
1971 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1972 IEMOP_HLP_MIN_486();
1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1974 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
1975}
1976
1977
1978/** Opcode 0x0f 0x0b. */
1979FNIEMOP_DEF(iemOp_ud2)
1980{
1981 IEMOP_MNEMONIC(ud2, "ud2");
1982 return IEMOP_RAISE_INVALID_OPCODE();
1983}
1984
1985/** Opcode 0x0f 0x0d. */
1986FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1987{
1988 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1989 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1990 {
1991 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1992 return IEMOP_RAISE_INVALID_OPCODE();
1993 }
1994
1995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1996 if (IEM_IS_MODRM_REG_MODE(bRm))
1997 {
1998 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1999 return IEMOP_RAISE_INVALID_OPCODE();
2000 }
2001
2002 switch (IEM_GET_MODRM_REG_8(bRm))
2003 {
2004 case 2: /* Aliased to /0 for the time being. */
2005 case 4: /* Aliased to /0 for the time being. */
2006 case 5: /* Aliased to /0 for the time being. */
2007 case 6: /* Aliased to /0 for the time being. */
2008 case 7: /* Aliased to /0 for the time being. */
2009 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2010 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2011 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2013 }
2014
2015 IEM_MC_BEGIN(0, 1);
2016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2019 /* Currently a NOP. */
2020 NOREF(GCPtrEffSrc);
2021 IEM_MC_ADVANCE_RIP();
2022 IEM_MC_END();
2023 return VINF_SUCCESS;
2024}
2025
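/* Note: the effective address is still calculated above so the decoding side
   effects (e.g. #UD on a LOCK prefix) stay exact, but no memory access is
   performed; the AMD prefetch hints are advisory and may legally be NOPs. */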
2026
2027/** Opcode 0x0f 0x0e. */
2028FNIEMOP_DEF(iemOp_femms)
2029{
2030 IEMOP_MNEMONIC(femms, "femms");
2031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2032
2033 IEM_MC_BEGIN(0, 0);
2034 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2035 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2036 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2037 IEM_MC_FPU_FROM_MMX_MODE();
2038 IEM_MC_ADVANCE_RIP();
2039 IEM_MC_END();
2040 return VINF_SUCCESS;
2041}
2042
2043
2044/** Opcode 0x0f 0x0f. */
2045FNIEMOP_DEF(iemOp_3Dnow)
2046{
2047 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2048 {
2049 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2050 return IEMOP_RAISE_INVALID_OPCODE();
2051 }
2052
2053#ifdef IEM_WITH_3DNOW
2054 /* This is pretty sparse, use switch instead of table. */
2055 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2056 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2057#else
2058 IEMOP_BITCH_ABOUT_STUB();
2059 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2060#endif
2061}
2062
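/* Note: 3DNow! puts the actual operation in a trailing immediate-style byte,
   i.e. 0F 0F /r ib, with e.g. ib=0x9E selecting PFADD; that is why the
   dispatcher above fetches one more opcode byte before branching. */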
2063
2064/**
2065 * @opcode 0x10
2066 * @oppfx none
2067 * @opcpuid sse
2068 * @opgroup og_sse_simdfp_datamove
2069 * @opxcpttype 4UA
2070 * @optest op1=1 op2=2 -> op1=2
2071 * @optest op1=0 op2=-22 -> op1=-22
2072 */
2073FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2074{
2075 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2077 if (IEM_IS_MODRM_REG_MODE(bRm))
2078 {
2079 /*
2080 * Register, register.
2081 */
2082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2083 IEM_MC_BEGIN(0, 0);
2084 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2085 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2086 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2087 IEM_GET_MODRM_RM(pVCpu, bRm));
2088 IEM_MC_ADVANCE_RIP();
2089 IEM_MC_END();
2090 }
2091 else
2092 {
2093 /*
2094 * Memory, register.
2095 */
2096 IEM_MC_BEGIN(0, 2);
2097 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2104
2105 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2106 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2107
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 return VINF_SUCCESS;
2113}
2114
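/* Note: in contrast to MOVAPS (0F 28), the memory form above uses the plain
   IEM_MC_FETCH_MEM_U128 rather than the _ALIGN_SSE variant, so a misaligned
   operand is legal and raises no #GP(0). */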
2115
2116/**
2117 * @opcode 0x10
2118 * @oppfx 0x66
2119 * @opcpuid sse2
2120 * @opgroup og_sse2_pcksclr_datamove
2121 * @opxcpttype 4UA
2122 * @optest op1=1 op2=2 -> op1=2
2123 * @optest op1=0 op2=-42 -> op1=-42
2124 */
2125FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2126{
2127 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2129 if (IEM_IS_MODRM_REG_MODE(bRm))
2130 {
2131 /*
2132 * Register, register.
2133 */
2134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2135 IEM_MC_BEGIN(0, 0);
2136 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2137 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2138 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2139 IEM_GET_MODRM_RM(pVCpu, bRm));
2140 IEM_MC_ADVANCE_RIP();
2141 IEM_MC_END();
2142 }
2143 else
2144 {
2145 /*
2146 * Memory, register.
2147 */
2148 IEM_MC_BEGIN(0, 2);
2149 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2151
2152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2154 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2155 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2156
2157 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2158 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2159
2160 IEM_MC_ADVANCE_RIP();
2161 IEM_MC_END();
2162 }
2163 return VINF_SUCCESS;
2164}
2165
2166
2167/**
2168 * @opcode 0x10
2169 * @oppfx 0xf3
2170 * @opcpuid sse
2171 * @opgroup og_sse_simdfp_datamove
2172 * @opxcpttype 5
2173 * @optest op1=1 op2=2 -> op1=2
2174 * @optest op1=0 op2=-22 -> op1=-22
2175 */
2176FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2177{
2178 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2180 if (IEM_IS_MODRM_REG_MODE(bRm))
2181 {
2182 /*
2183 * Register, register.
2184 */
2185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2186 IEM_MC_BEGIN(0, 1);
2187 IEM_MC_LOCAL(uint32_t, uSrc);
2188
2189 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2190 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2191 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2192 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2193
2194 IEM_MC_ADVANCE_RIP();
2195 IEM_MC_END();
2196 }
2197 else
2198 {
2199 /*
2200 * Memory, register.
2201 */
2202 IEM_MC_BEGIN(0, 2);
2203 IEM_MC_LOCAL(uint32_t, uSrc);
2204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2205
2206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2208 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2209 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2210
2211 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2212 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2213
2214 IEM_MC_ADVANCE_RIP();
2215 IEM_MC_END();
2216 }
2217 return VINF_SUCCESS;
2218}
2219
2220
2221/**
2222 * @opcode 0x10
2223 * @oppfx 0xf2
2224 * @opcpuid sse2
2225 * @opgroup og_sse2_pcksclr_datamove
2226 * @opxcpttype 5
2227 * @optest op1=1 op2=2 -> op1=2
2228 * @optest op1=0 op2=-42 -> op1=-42
2229 */
2230FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2231{
2232 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 if (IEM_IS_MODRM_REG_MODE(bRm))
2235 {
2236 /*
2237 * Register, register.
2238 */
2239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2240 IEM_MC_BEGIN(0, 1);
2241 IEM_MC_LOCAL(uint64_t, uSrc);
2242
2243 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2244 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2245 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2246 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2247
2248 IEM_MC_ADVANCE_RIP();
2249 IEM_MC_END();
2250 }
2251 else
2252 {
2253 /*
2254 * Memory, register.
2255 */
2256 IEM_MC_BEGIN(0, 2);
2257 IEM_MC_LOCAL(uint64_t, uSrc);
2258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2259
2260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2263 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2264
2265 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2266 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2267
2268 IEM_MC_ADVANCE_RIP();
2269 IEM_MC_END();
2270 }
2271 return VINF_SUCCESS;
2272}
2273
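/* Note for the MOVSS/MOVSD loads above: the register forms merge, as
   IEM_MC_STORE_XREG_U32/U64 leaves the remaining destination bytes alone,
   while the memory forms zero-extend the scalar to 128 bits via the
   _ZX_U128 stores. */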
2274
2275/**
2276 * @opcode 0x11
2277 * @oppfx none
2278 * @opcpuid sse
2279 * @opgroup og_sse_simdfp_datamove
2280 * @opxcpttype 4UA
2281 * @optest op1=1 op2=2 -> op1=2
2282 * @optest op1=0 op2=-42 -> op1=-42
2283 */
2284FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2285{
2286 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2288 if (IEM_IS_MODRM_REG_MODE(bRm))
2289 {
2290 /*
2291 * Register, register.
2292 */
2293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2294 IEM_MC_BEGIN(0, 0);
2295 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2296 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2297 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2298 IEM_GET_MODRM_REG(pVCpu, bRm));
2299 IEM_MC_ADVANCE_RIP();
2300 IEM_MC_END();
2301 }
2302 else
2303 {
2304 /*
2305 * Memory, register.
2306 */
2307 IEM_MC_BEGIN(0, 2);
2308 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2310
2311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2314 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2315
2316 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2317 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2318
2319 IEM_MC_ADVANCE_RIP();
2320 IEM_MC_END();
2321 }
2322 return VINF_SUCCESS;
2323}
2324
2325
2326/**
2327 * @opcode 0x11
2328 * @oppfx 0x66
2329 * @opcpuid sse2
2330 * @opgroup og_sse2_pcksclr_datamove
2331 * @opxcpttype 4UA
2332 * @optest op1=1 op2=2 -> op1=2
2333 * @optest op1=0 op2=-42 -> op1=-42
2334 */
2335FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2336{
2337 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2339 if (IEM_IS_MODRM_REG_MODE(bRm))
2340 {
2341 /*
2342 * Register, register.
2343 */
2344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2345 IEM_MC_BEGIN(0, 0);
2346 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2348 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2349 IEM_GET_MODRM_REG(pVCpu, bRm));
2350 IEM_MC_ADVANCE_RIP();
2351 IEM_MC_END();
2352 }
2353 else
2354 {
2355 /*
2356 * Memory, register.
2357 */
2358 IEM_MC_BEGIN(0, 2);
2359 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2361
2362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2366
2367 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2368 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2369
2370 IEM_MC_ADVANCE_RIP();
2371 IEM_MC_END();
2372 }
2373 return VINF_SUCCESS;
2374}
2375
2376
2377/**
2378 * @opcode 0x11
2379 * @oppfx 0xf3
2380 * @opcpuid sse
2381 * @opgroup og_sse_simdfp_datamove
2382 * @opxcpttype 5
2383 * @optest op1=1 op2=2 -> op1=2
2384 * @optest op1=0 op2=-22 -> op1=-22
2385 */
2386FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2387{
2388 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2390 if (IEM_IS_MODRM_REG_MODE(bRm))
2391 {
2392 /*
2393 * Register, register.
2394 */
2395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2396 IEM_MC_BEGIN(0, 1);
2397 IEM_MC_LOCAL(uint32_t, uSrc);
2398
2399 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2401 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2402 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2403
2404 IEM_MC_ADVANCE_RIP();
2405 IEM_MC_END();
2406 }
2407 else
2408 {
2409 /*
2410 * Memory, register.
2411 */
2412 IEM_MC_BEGIN(0, 2);
2413 IEM_MC_LOCAL(uint32_t, uSrc);
2414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2415
2416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2418 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2419 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2420
2421 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2422 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2423
2424 IEM_MC_ADVANCE_RIP();
2425 IEM_MC_END();
2426 }
2427 return VINF_SUCCESS;
2428}
2429
2430
2431/**
2432 * @opcode 0x11
2433 * @oppfx 0xf2
2434 * @opcpuid sse2
2435 * @opgroup og_sse2_pcksclr_datamove
2436 * @opxcpttype 5
2437 * @optest op1=1 op2=2 -> op1=2
2438 * @optest op1=0 op2=-42 -> op1=-42
2439 */
2440FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2441{
2442 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2444 if (IEM_IS_MODRM_REG_MODE(bRm))
2445 {
2446 /*
2447 * Register, register.
2448 */
2449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2450 IEM_MC_BEGIN(0, 1);
2451 IEM_MC_LOCAL(uint64_t, uSrc);
2452
2453 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2455 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2456 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2457
2458 IEM_MC_ADVANCE_RIP();
2459 IEM_MC_END();
2460 }
2461 else
2462 {
2463 /*
2464 * Memory, register.
2465 */
2466 IEM_MC_BEGIN(0, 2);
2467 IEM_MC_LOCAL(uint64_t, uSrc);
2468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2469
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2472 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2473 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2474
2475 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2476 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2477
2478 IEM_MC_ADVANCE_RIP();
2479 IEM_MC_END();
2480 }
2481 return VINF_SUCCESS;
2482}
2483
2484
2485FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2486{
2487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2488 if (IEM_IS_MODRM_REG_MODE(bRm))
2489 {
2490 /**
2491 * @opcode 0x12
2492 * @opcodesub 11 mr/reg
2493 * @oppfx none
2494 * @opcpuid sse
2495 * @opgroup og_sse_simdfp_datamove
2496 * @opxcpttype 5
2497 * @optest op1=1 op2=2 -> op1=2
2498 * @optest op1=0 op2=-42 -> op1=-42
2499 */
2500 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2501
2502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2503 IEM_MC_BEGIN(0, 1);
2504 IEM_MC_LOCAL(uint64_t, uSrc);
2505
2506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2507 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2508 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2509 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2510
2511 IEM_MC_ADVANCE_RIP();
2512 IEM_MC_END();
2513 }
2514 else
2515 {
2516 /**
2517 * @opdone
2518 * @opcode 0x12
2519 * @opcodesub !11 mr/reg
2520 * @oppfx none
2521 * @opcpuid sse
2522 * @opgroup og_sse_simdfp_datamove
2523 * @opxcpttype 5
2524 * @optest op1=1 op2=2 -> op1=2
2525 * @optest op1=0 op2=-42 -> op1=-42
2526 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2527 */
2528 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2529
2530 IEM_MC_BEGIN(0, 2);
2531 IEM_MC_LOCAL(uint64_t, uSrc);
2532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2533
2534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2538
2539 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2540 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2541
2542 IEM_MC_ADVANCE_RIP();
2543 IEM_MC_END();
2544 }
2545 return VINF_SUCCESS;
2546}
2547
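/* Note: the mod field alone separates the two forms above: mod=11 is
   MOVHLPS, copying the high quadword of the source register into the low
   quadword of the destination, whereas a memory operand is MOVLPS, a plain
   64-bit load into the low quadword. */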
2548
2549/**
2550 * @opcode 0x12
2551 * @opcodesub !11 mr/reg
2552 * @oppfx 0x66
2553 * @opcpuid sse2
2554 * @opgroup og_sse2_pcksclr_datamove
2555 * @opxcpttype 5
2556 * @optest op1=1 op2=2 -> op1=2
2557 * @optest op1=0 op2=-42 -> op1=-42
2558 */
2559FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2560{
2561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2562 if (IEM_IS_MODRM_MEM_MODE(bRm))
2563 {
2564 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2565
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(uint64_t, uSrc);
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2576 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 return VINF_SUCCESS;
2581 }
2582
2583 /**
2584 * @opdone
2585 * @opmnemonic ud660f12m3
2586 * @opcode 0x12
2587 * @opcodesub 11 mr/reg
2588 * @oppfx 0x66
2589 * @opunused immediate
2590 * @opcpuid sse
2591 * @optest ->
2592 */
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594}
2595
2596
2597/**
2598 * @opcode 0x12
2599 * @oppfx 0xf3
2600 * @opcpuid sse3
2601 * @opgroup og_sse3_pcksclr_datamove
2602 * @opxcpttype 4
2603 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2604 * op1=0x00000002000000020000000100000001
2605 */
2606FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2607{
2608 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2610 if (IEM_IS_MODRM_REG_MODE(bRm))
2611 {
2612 /*
2613 * Register, register.
2614 */
2615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2616 IEM_MC_BEGIN(2, 0);
2617 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2618 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2619
2620 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2621 IEM_MC_PREPARE_SSE_USAGE();
2622
2623 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2624 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2625 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2626
2627 IEM_MC_ADVANCE_RIP();
2628 IEM_MC_END();
2629 }
2630 else
2631 {
2632 /*
2633 * Register, memory.
2634 */
2635 IEM_MC_BEGIN(2, 2);
2636 IEM_MC_LOCAL(RTUINT128U, uSrc);
2637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2638 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2639 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2640
2641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2643 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2644 IEM_MC_PREPARE_SSE_USAGE();
2645
2646 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2647 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2648 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2649
2650 IEM_MC_ADVANCE_RIP();
2651 IEM_MC_END();
2652 }
2653 return VINF_SUCCESS;
2654}
2655
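/* Note: MOVSLDUP duplicates the even dwords, i.e. dst[0]=dst[1]=src[0] and
   dst[2]=dst[3]=src[2], as the @optest above illustrates. */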
2656
2657/**
2658 * @opcode 0x12
2659 * @oppfx 0xf2
2660 * @opcpuid sse3
2661 * @opgroup og_sse3_pcksclr_datamove
2662 * @opxcpttype 5
2663 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2664 * op1=0x22222222111111112222222211111111
2665 */
2666FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2667{
2668 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2670 if (IEM_IS_MODRM_REG_MODE(bRm))
2671 {
2672 /*
2673 * Register, register.
2674 */
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_BEGIN(2, 0);
2677 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2678 IEM_MC_ARG(uint64_t, uSrc, 1);
2679
2680 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2681 IEM_MC_PREPARE_SSE_USAGE();
2682
2683 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2684 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2685 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2686
2687 IEM_MC_ADVANCE_RIP();
2688 IEM_MC_END();
2689 }
2690 else
2691 {
2692 /*
2693 * Register, memory.
2694 */
2695 IEM_MC_BEGIN(2, 2);
2696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2697 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2698 IEM_MC_ARG(uint64_t, uSrc, 1);
2699
2700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2702 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2703 IEM_MC_PREPARE_SSE_USAGE();
2704
2705 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2706 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2707 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2708
2709 IEM_MC_ADVANCE_RIP();
2710 IEM_MC_END();
2711 }
2712 return VINF_SUCCESS;
2713}
2714
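/* Note: MOVDDUP needs only a 64-bit fetch even for the memory form, since
   the result is simply the low quadword broadcast into both halves of the
   destination register. */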
2715
2716/**
2717 * @opcode 0x13
2718 * @opcodesub !11 mr/reg
2719 * @oppfx none
2720 * @opcpuid sse
2721 * @opgroup og_sse_simdfp_datamove
2722 * @opxcpttype 5
2723 * @optest op1=1 op2=2 -> op1=2
2724 * @optest op1=0 op2=-42 -> op1=-42
2725 */
2726FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2727{
2728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2729 if (IEM_IS_MODRM_MEM_MODE(bRm))
2730 {
2731 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2732
2733 IEM_MC_BEGIN(0, 2);
2734 IEM_MC_LOCAL(uint64_t, uSrc);
2735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2736
2737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2739 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2740 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2741
2742 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2743 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2744
2745 IEM_MC_ADVANCE_RIP();
2746 IEM_MC_END();
2747 return VINF_SUCCESS;
2748 }
2749
2750 /**
2751 * @opdone
2752 * @opmnemonic ud0f13m3
2753 * @opcode 0x13
2754 * @opcodesub 11 mr/reg
2755 * @oppfx none
2756 * @opunused immediate
2757 * @opcpuid sse
2758 * @optest ->
2759 */
2760 return IEMOP_RAISE_INVALID_OPCODE();
2761}
2762
2763
2764/**
2765 * @opcode 0x13
2766 * @opcodesub !11 mr/reg
2767 * @oppfx 0x66
2768 * @opcpuid sse2
2769 * @opgroup og_sse2_pcksclr_datamove
2770 * @opxcpttype 5
2771 * @optest op1=1 op2=2 -> op1=2
2772 * @optest op1=0 op2=-42 -> op1=-42
2773 */
2774FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2775{
2776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2777 if (IEM_IS_MODRM_MEM_MODE(bRm))
2778 {
2779 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2780 IEM_MC_BEGIN(0, 2);
2781 IEM_MC_LOCAL(uint64_t, uSrc);
2782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2783
2784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2786 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2787 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2788
2789 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2790 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2791
2792 IEM_MC_ADVANCE_RIP();
2793 IEM_MC_END();
2794 return VINF_SUCCESS;
2795 }
2796
2797 /**
2798 * @opdone
2799 * @opmnemonic ud660f13m3
2800 * @opcode 0x13
2801 * @opcodesub 11 mr/reg
2802 * @oppfx 0x66
2803 * @opunused immediate
2804 * @opcpuid sse
2805 * @optest ->
2806 */
2807 return IEMOP_RAISE_INVALID_OPCODE();
2808}
2809
2810
2811/**
2812 * @opmnemonic udf30f13
2813 * @opcode 0x13
2814 * @oppfx 0xf3
2815 * @opunused intel-modrm
2816 * @opcpuid sse
2817 * @optest ->
2818 * @opdone
2819 */
2820
2821/**
2822 * @opmnemonic udf20f13
2823 * @opcode 0x13
2824 * @oppfx 0xf2
2825 * @opunused intel-modrm
2826 * @opcpuid sse
2827 * @optest ->
2828 * @opdone
2829 */
2830
2831/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2832FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2833{
2834 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2835 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2836}
2837
2838
2839/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2840FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2841{
2842 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2843 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2844}
2845
2846
2847/**
2848 * @opdone
2849 * @opmnemonic udf30f14
2850 * @opcode 0x14
2851 * @oppfx 0xf3
2852 * @opunused intel-modrm
2853 * @opcpuid sse
2854 * @optest ->
2855 * @opdone
2856 */
2857
2858/**
2859 * @opmnemonic udf20f14
2860 * @opcode 0x14
2861 * @oppfx 0xf2
2862 * @opunused intel-modrm
2863 * @opcpuid sse
2864 * @optest ->
2865 * @opdone
2866 */
2867
2868/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2869FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2870{
2871 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2872 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2873}
2874
2875
2876/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2877FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2878{
2879 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2880 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2881}
2882
2883
2884/* Opcode 0xf3 0x0f 0x15 - invalid */
2885/* Opcode 0xf2 0x0f 0x15 - invalid */
2886
2887/**
2888 * @opdone
2889 * @opmnemonic udf30f15
2890 * @opcode 0x15
2891 * @oppfx 0xf3
2892 * @opunused intel-modrm
2893 * @opcpuid sse
2894 * @optest ->
2895 * @opdone
2896 */
2897
2898/**
2899 * @opmnemonic udf20f15
2900 * @opcode 0x15
2901 * @oppfx 0xf2
2902 * @opunused intel-modrm
2903 * @opcpuid sse
2904 * @optest ->
2905 * @opdone
2906 */
2907
2908FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2909{
2910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2911 if (IEM_IS_MODRM_REG_MODE(bRm))
2912 {
2913 /**
2914 * @opcode 0x16
2915 * @opcodesub 11 mr/reg
2916 * @oppfx none
2917 * @opcpuid sse
2918 * @opgroup og_sse_simdfp_datamove
2919 * @opxcpttype 5
2920 * @optest op1=1 op2=2 -> op1=2
2921 * @optest op1=0 op2=-42 -> op1=-42
2922 */
2923 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2924
2925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2926 IEM_MC_BEGIN(0, 1);
2927 IEM_MC_LOCAL(uint64_t, uSrc);
2928
2929 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2930 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2931 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2932 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2933
2934 IEM_MC_ADVANCE_RIP();
2935 IEM_MC_END();
2936 }
2937 else
2938 {
2939 /**
2940 * @opdone
2941 * @opcode 0x16
2942 * @opcodesub !11 mr/reg
2943 * @oppfx none
2944 * @opcpuid sse
2945 * @opgroup og_sse_simdfp_datamove
2946 * @opxcpttype 5
2947 * @optest op1=1 op2=2 -> op1=2
2948 * @optest op1=0 op2=-42 -> op1=-42
2949 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2950 */
2951 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2952
2953 IEM_MC_BEGIN(0, 2);
2954 IEM_MC_LOCAL(uint64_t, uSrc);
2955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2956
2957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2959 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2960 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2961
2962 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2963 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2964
2965 IEM_MC_ADVANCE_RIP();
2966 IEM_MC_END();
2967 }
2968 return VINF_SUCCESS;
2969}
2970
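/* Note: MOVLHPS is the mirror image of MOVHLPS: the low quadword of the
   source register lands in the high quadword of the destination, which is
   what IEM_MC_STORE_XREG_HI_U64 does above. */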
2971
2972/**
2973 * @opcode 0x16
2974 * @opcodesub !11 mr/reg
2975 * @oppfx 0x66
2976 * @opcpuid sse2
2977 * @opgroup og_sse2_pcksclr_datamove
2978 * @opxcpttype 5
2979 * @optest op1=1 op2=2 -> op1=2
2980 * @optest op1=0 op2=-42 -> op1=-42
2981 */
2982FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2983{
2984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2985 if (IEM_IS_MODRM_MEM_MODE(bRm))
2986 {
2987 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2988 IEM_MC_BEGIN(0, 2);
2989 IEM_MC_LOCAL(uint64_t, uSrc);
2990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2991
2992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2995 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2996
2997 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2998 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2999
3000 IEM_MC_ADVANCE_RIP();
3001 IEM_MC_END();
3002 return VINF_SUCCESS;
3003 }
3004
3005 /**
3006 * @opdone
3007 * @opmnemonic ud660f16m3
3008 * @opcode 0x16
3009 * @opcodesub 11 mr/reg
3010 * @oppfx 0x66
3011 * @opunused immediate
3012 * @opcpuid sse
3013 * @optest ->
3014 */
3015 return IEMOP_RAISE_INVALID_OPCODE();
3016}
3017
3018
3019/**
3020 * @opcode 0x16
3021 * @oppfx 0xf3
3022 * @opcpuid sse3
3023 * @opgroup og_sse3_pcksclr_datamove
3024 * @opxcpttype 4
3025 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3026 * op1=0x00000002000000020000000100000001
3027 */
3028FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3029{
3030 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3032 if (IEM_IS_MODRM_REG_MODE(bRm))
3033 {
3034 /*
3035 * Register, register.
3036 */
3037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3038 IEM_MC_BEGIN(2, 0);
3039 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3040 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3041
3042 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3043 IEM_MC_PREPARE_SSE_USAGE();
3044
3045 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3046 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3047 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3048
3049 IEM_MC_ADVANCE_RIP();
3050 IEM_MC_END();
3051 }
3052 else
3053 {
3054 /*
3055 * Register, memory.
3056 */
3057 IEM_MC_BEGIN(2, 2);
3058 IEM_MC_LOCAL(RTUINT128U, uSrc);
3059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3060 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3061 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3062
3063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3065 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3066 IEM_MC_PREPARE_SSE_USAGE();
3067
3068 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3069 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3070 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3071
3072 IEM_MC_ADVANCE_RIP();
3073 IEM_MC_END();
3074 }
3075 return VINF_SUCCESS;
3076}
3077
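/* Note: MOVSHDUP mirrors MOVSLDUP but duplicates the odd dwords, i.e.
   dst[0]=dst[1]=src[1] and dst[2]=dst[3]=src[3]. */
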
3078/**
3079 * @opdone
3080 * @opmnemonic udf20f16
3081 * @opcode 0x16
3082 * @oppfx 0xf2
3083 * @opunused intel-modrm
3084 * @opcpuid sse
3085 * @optest ->
3086 * @opdone
3087 */
3088
3089
3090/**
3091 * @opcode 0x17
3092 * @opcodesub !11 mr/reg
3093 * @oppfx none
3094 * @opcpuid sse
3095 * @opgroup og_sse_simdfp_datamove
3096 * @opxcpttype 5
3097 * @optest op1=1 op2=2 -> op1=2
3098 * @optest op1=0 op2=-42 -> op1=-42
3099 */
3100FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3101{
3102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3103 if (IEM_IS_MODRM_MEM_MODE(bRm))
3104 {
3105 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3106
3107 IEM_MC_BEGIN(0, 2);
3108 IEM_MC_LOCAL(uint64_t, uSrc);
3109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3110
3111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3113 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3114 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3115
3116 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3117 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3118
3119 IEM_MC_ADVANCE_RIP();
3120 IEM_MC_END();
3121 return VINF_SUCCESS;
3122 }
3123
3124 /**
3125 * @opdone
3126 * @opmnemonic ud0f17m3
3127 * @opcode 0x17
3128 * @opcodesub 11 mr/reg
3129 * @oppfx none
3130 * @opunused immediate
3131 * @opcpuid sse
3132 * @optest ->
3133 */
3134 return IEMOP_RAISE_INVALID_OPCODE();
3135}
3136
3137
3138/**
3139 * @opcode 0x17
3140 * @opcodesub !11 mr/reg
3141 * @oppfx 0x66
3142 * @opcpuid sse2
3143 * @opgroup og_sse2_pcksclr_datamove
3144 * @opxcpttype 5
3145 * @optest op1=1 op2=2 -> op1=2
3146 * @optest op1=0 op2=-42 -> op1=-42
3147 */
3148FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3149{
3150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3151 if (IEM_IS_MODRM_MEM_MODE(bRm))
3152 {
3153 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3154
3155 IEM_MC_BEGIN(0, 2);
3156 IEM_MC_LOCAL(uint64_t, uSrc);
3157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3158
3159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3161 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3162 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3163
3164 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3165 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3166
3167 IEM_MC_ADVANCE_RIP();
3168 IEM_MC_END();
3169 return VINF_SUCCESS;
3170 }
3171
3172 /**
3173 * @opdone
3174 * @opmnemonic ud660f17m3
3175 * @opcode 0x17
3176 * @opcodesub 11 mr/reg
3177 * @oppfx 0x66
3178 * @opunused immediate
3179 * @opcpuid sse
3180 * @optest ->
3181 */
3182 return IEMOP_RAISE_INVALID_OPCODE();
3183}
3184
3185
3186/**
3187 * @opdone
3188 * @opmnemonic udf30f17
3189 * @opcode 0x17
3190 * @oppfx 0xf3
3191 * @opunused intel-modrm
3192 * @opcpuid sse
3193 * @optest ->
3194 * @opdone
3195 */
3196
3197/**
3198 * @opmnemonic udf20f17
3199 * @opcode 0x17
3200 * @oppfx 0xf2
3201 * @opunused intel-modrm
3202 * @opcpuid sse
3203 * @optest ->
3204 * @opdone
3205 */
3206
3207
3208/** Opcode 0x0f 0x18. */
3209FNIEMOP_DEF(iemOp_prefetch_Grp16)
3210{
3211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3212 if (IEM_IS_MODRM_MEM_MODE(bRm))
3213 {
3214 switch (IEM_GET_MODRM_REG_8(bRm))
3215 {
3216 case 4: /* Aliased to /0 for the time being according to AMD. */
3217 case 5: /* Aliased to /0 for the time being according to AMD. */
3218 case 6: /* Aliased to /0 for the time being according to AMD. */
3219 case 7: /* Aliased to /0 for the time being according to AMD. */
3220 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3221 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3222 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3223 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3225 }
3226
3227 IEM_MC_BEGIN(0, 1);
3228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3231 /* Currently a NOP. */
3232 NOREF(GCPtrEffSrc);
3233 IEM_MC_ADVANCE_RIP();
3234 IEM_MC_END();
3235 return VINF_SUCCESS;
3236 }
3237
3238 return IEMOP_RAISE_INVALID_OPCODE();
3239}
3240
3241
3242/** Opcode 0x0f 0x19..0x1f. */
3243FNIEMOP_DEF(iemOp_nop_Ev)
3244{
3245 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3247 if (IEM_IS_MODRM_REG_MODE(bRm))
3248 {
3249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3250 IEM_MC_BEGIN(0, 0);
3251 IEM_MC_ADVANCE_RIP();
3252 IEM_MC_END();
3253 }
3254 else
3255 {
3256 IEM_MC_BEGIN(0, 1);
3257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3260 /* Currently a NOP. */
3261 NOREF(GCPtrEffSrc);
3262 IEM_MC_ADVANCE_RIP();
3263 IEM_MC_END();
3264 }
3265 return VINF_SUCCESS;
3266}
3267
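/* Note: 0F 1F /0 is the recommended long NOP encoding (e.g. 0F 1F 00 for
   the three byte form, padded out with SIB, displacement and prefix bytes
   for longer ones); all of 0F 19..0F 1F funnel into the handler above. */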
3268
3269/** Opcode 0x0f 0x20. */
3270FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3271{
3272 /* mod is ignored, as are operand size overrides. */
3273 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3274 IEMOP_HLP_MIN_386();
3275 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3276 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3277 else
3278 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3279
3280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3281 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3282 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3283 {
3284 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3285 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3286 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3287 iCrReg |= 8;
3288 }
3289 switch (iCrReg)
3290 {
3291 case 0: case 2: case 3: case 4: case 8:
3292 break;
3293 default:
3294 return IEMOP_RAISE_INVALID_OPCODE();
3295 }
3296 IEMOP_HLP_DONE_DECODING();
3297
3298 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3299}
3300
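/* Note: the LOCK handling above implements AMD's alternative CR8 encoding:
   F0 0F 20 C0 reads CR8 into EAX, which matters outside long mode where
   REX.R is unavailable. */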
3301
3302/** Opcode 0x0f 0x21. */
3303FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3304{
3305 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3306 IEMOP_HLP_MIN_386();
3307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3310 return IEMOP_RAISE_INVALID_OPCODE();
3311 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3312 IEM_GET_MODRM_RM(pVCpu, bRm),
3313 IEM_GET_MODRM_REG_8(bRm));
3314}
3315
3316
3317/** Opcode 0x0f 0x22. */
3318FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3319{
3320 /* mod is ignored, as are operand size overrides. */
3321 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3322 IEMOP_HLP_MIN_386();
3323 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3324 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3325 else
3326 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3327
3328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3329 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3330 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3331 {
3332 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3333 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3334 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3335 iCrReg |= 8;
3336 }
3337 switch (iCrReg)
3338 {
3339 case 0: case 2: case 3: case 4: case 8:
3340 break;
3341 default:
3342 return IEMOP_RAISE_INVALID_OPCODE();
3343 }
3344 IEMOP_HLP_DONE_DECODING();
3345
3346 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3347}
3348
3349
3350/** Opcode 0x0f 0x23. */
3351FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3352{
3353 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3354 IEMOP_HLP_MIN_386();
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3358 return IEMOP_RAISE_INVALID_OPCODE();
3359 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3360 IEM_GET_MODRM_REG_8(bRm),
3361 IEM_GET_MODRM_RM(pVCpu, bRm));
3362}
3363
3364
3365/** Opcode 0x0f 0x24. */
3366FNIEMOP_DEF(iemOp_mov_Rd_Td)
3367{
3368 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3369 IEMOP_HLP_MIN_386();
3370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3372 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3373 return IEMOP_RAISE_INVALID_OPCODE();
3374 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3375 IEM_GET_MODRM_RM(pVCpu, bRm),
3376 IEM_GET_MODRM_REG_8(bRm));
3377}
3378
3379
3380/** Opcode 0x0f 0x26. */
3381FNIEMOP_DEF(iemOp_mov_Td_Rd)
3382{
3383 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3384 IEMOP_HLP_MIN_386();
3385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3387 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3388 return IEMOP_RAISE_INVALID_OPCODE();
3389 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3390 IEM_GET_MODRM_REG_8(bRm),
3391 IEM_GET_MODRM_RM(pVCpu, bRm));
3392}
3393
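/* Note: TR3..TR7 are the 386/486 test registers; the Pentium dropped them,
   which is why the two handlers above raise #UD when the target CPU is
   IEMTARGETCPU_PENTIUM or later. */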
3394
3395/**
3396 * @opcode 0x28
3397 * @oppfx none
3398 * @opcpuid sse
3399 * @opgroup og_sse_simdfp_datamove
3400 * @opxcpttype 1
3401 * @optest op1=1 op2=2 -> op1=2
3402 * @optest op1=0 op2=-42 -> op1=-42
3403 */
3404FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3405{
3406 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3408 if (IEM_IS_MODRM_REG_MODE(bRm))
3409 {
3410 /*
3411 * Register, register.
3412 */
3413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3414 IEM_MC_BEGIN(0, 0);
3415 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3417 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3418 IEM_GET_MODRM_RM(pVCpu, bRm));
3419 IEM_MC_ADVANCE_RIP();
3420 IEM_MC_END();
3421 }
3422 else
3423 {
3424 /*
3425 * Register, memory.
3426 */
3427 IEM_MC_BEGIN(0, 2);
3428 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3430
3431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3433 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435
3436 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3437 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3438
3439 IEM_MC_ADVANCE_RIP();
3440 IEM_MC_END();
3441 }
3442 return VINF_SUCCESS;
3443}
3444
3445/**
3446 * @opcode 0x28
3447 * @oppfx 66
3448 * @opcpuid sse2
3449 * @opgroup og_sse2_pcksclr_datamove
3450 * @opxcpttype 1
3451 * @optest op1=1 op2=2 -> op1=2
3452 * @optest op1=0 op2=-42 -> op1=-42
3453 */
3454FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3455{
3456 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3458 if (IEM_IS_MODRM_REG_MODE(bRm))
3459 {
3460 /*
3461 * Register, register.
3462 */
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3464 IEM_MC_BEGIN(0, 0);
3465 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3466 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3467 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3468 IEM_GET_MODRM_RM(pVCpu, bRm));
3469 IEM_MC_ADVANCE_RIP();
3470 IEM_MC_END();
3471 }
3472 else
3473 {
3474 /*
3475 * Register, memory.
3476 */
3477 IEM_MC_BEGIN(0, 2);
3478 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3480
3481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3483 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3485
3486 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3487 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3488
3489 IEM_MC_ADVANCE_RIP();
3490 IEM_MC_END();
3491 }
3492 return VINF_SUCCESS;
3493}
3494
3495/* Opcode 0xf3 0x0f 0x28 - invalid */
3496/* Opcode 0xf2 0x0f 0x28 - invalid */
3497
3498/**
3499 * @opcode 0x29
3500 * @oppfx none
3501 * @opcpuid sse
3502 * @opgroup og_sse_simdfp_datamove
3503 * @opxcpttype 1
3504 * @optest op1=1 op2=2 -> op1=2
3505 * @optest op1=0 op2=-42 -> op1=-42
3506 */
3507FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3508{
3509 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3511 if (IEM_IS_MODRM_REG_MODE(bRm))
3512 {
3513 /*
3514 * Register, register.
3515 */
3516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3517 IEM_MC_BEGIN(0, 0);
3518 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3519 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3520 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3521 IEM_GET_MODRM_REG(pVCpu, bRm));
3522 IEM_MC_ADVANCE_RIP();
3523 IEM_MC_END();
3524 }
3525 else
3526 {
3527 /*
3528 * Memory, register.
3529 */
3530 IEM_MC_BEGIN(0, 2);
3531 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3533
3534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3538
3539 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3540 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3541
3542 IEM_MC_ADVANCE_RIP();
3543 IEM_MC_END();
3544 }
3545 return VINF_SUCCESS;
3546}
3547
3548/**
3549 * @opcode 0x29
3550 * @oppfx 66
3551 * @opcpuid sse2
3552 * @opgroup og_sse2_pcksclr_datamove
3553 * @opxcpttype 1
3554 * @optest op1=1 op2=2 -> op1=2
3555 * @optest op1=0 op2=-42 -> op1=-42
3556 */
3557FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3558{
3559 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3561 if (IEM_IS_MODRM_REG_MODE(bRm))
3562 {
3563 /*
3564 * Register, register.
3565 */
3566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3567 IEM_MC_BEGIN(0, 0);
3568 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3569 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3570 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3571 IEM_GET_MODRM_REG(pVCpu, bRm));
3572 IEM_MC_ADVANCE_RIP();
3573 IEM_MC_END();
3574 }
3575 else
3576 {
3577 /*
3578 * Memory, register.
3579 */
3580 IEM_MC_BEGIN(0, 2);
3581 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3583
3584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3586 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3587 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3588
3589 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3590 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3591
3592 IEM_MC_ADVANCE_RIP();
3593 IEM_MC_END();
3594 }
3595 return VINF_SUCCESS;
3596}
3597
3598/* Opcode 0xf3 0x0f 0x29 - invalid */
3599/* Opcode 0xf2 0x0f 0x29 - invalid */
3600
3601
3602/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3603FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3604{
3605 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3607 if (IEM_IS_MODRM_REG_MODE(bRm))
3608 {
3609 /*
3610 * Register, register.
3611 */
3612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3613
3614 IEM_MC_BEGIN(3, 1);
3615 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3616 IEM_MC_LOCAL(X86XMMREG, Dst);
3617 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3618 IEM_MC_ARG(uint64_t, u64Src, 2);
3619 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3620 IEM_MC_PREPARE_FPU_USAGE();
3621 IEM_MC_FPU_TO_MMX_MODE();
3622
3623 IEM_MC_REF_MXCSR(pfMxcsr);
3624 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3625 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3626
3627 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3628 IEM_MC_IF_MXCSR_XCPT_PENDING()
3629 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3630 IEM_MC_ELSE()
3631 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3632 IEM_MC_ENDIF();
3633
3634 IEM_MC_ADVANCE_RIP();
3635 IEM_MC_END();
3636 }
3637 else
3638 {
3639 /*
3640 * Register, memory.
3641 */
3642 IEM_MC_BEGIN(3, 3);
3643 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3644 IEM_MC_LOCAL(X86XMMREG, Dst);
3645 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3646 IEM_MC_ARG(uint64_t, u64Src, 2);
3647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3648
3649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3651 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3652 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3653
3654 IEM_MC_PREPARE_FPU_USAGE();
3655 IEM_MC_FPU_TO_MMX_MODE();
3656 IEM_MC_REF_MXCSR(pfMxcsr);
     IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3657
3658 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3659 IEM_MC_IF_MXCSR_XCPT_PENDING()
3660 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3661 IEM_MC_ELSE()
3662 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3663 IEM_MC_ENDIF();
3664
3665 IEM_MC_ADVANCE_RIP();
3666 IEM_MC_END();
3667 }
3668 return VINF_SUCCESS;
3669}
3670
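/* Note: CVTPI2PS takes its packed-integer source from an MMX register and
   thus flips the FPU into MMX mode (IEM_MC_FPU_TO_MMX_MODE above); contrast
   the memory form of CVTPI2PD below, which is documented not to cause that
   transition. */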
3671
3672/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3673FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3674{
3675 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3677 if (IEM_IS_MODRM_REG_MODE(bRm))
3678 {
3679 /*
3680 * Register, register.
3681 */
3682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3683
3684 IEM_MC_BEGIN(3, 1);
3685 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3686 IEM_MC_LOCAL(X86XMMREG, Dst);
3687 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3688 IEM_MC_ARG(uint64_t, u64Src, 2);
3689 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3690 IEM_MC_PREPARE_FPU_USAGE();
3691 IEM_MC_FPU_TO_MMX_MODE();
3692
3693 IEM_MC_REF_MXCSR(pfMxcsr);
3694 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3695
3696 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3697 IEM_MC_IF_MXCSR_XCPT_PENDING()
3698 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3699 IEM_MC_ELSE()
3700 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3701 IEM_MC_ENDIF();
3702
3703 IEM_MC_ADVANCE_RIP();
3704 IEM_MC_END();
3705 }
3706 else
3707 {
3708 /*
3709 * Register, memory.
3710 */
3711 IEM_MC_BEGIN(3, 3);
3712 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3713 IEM_MC_LOCAL(X86XMMREG, Dst);
3714 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3715 IEM_MC_ARG(uint64_t, u64Src, 2);
3716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3717
3718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3721 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3722
3723 /* Doesn't cause a transition to MMX mode. */
3724 IEM_MC_PREPARE_SSE_USAGE();
3725 IEM_MC_REF_MXCSR(pfMxcsr);
3726
3727 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3728 IEM_MC_IF_MXCSR_XCPT_PENDING()
3729 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3730 IEM_MC_ELSE()
3731 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3732 IEM_MC_ENDIF();
3733
3734 IEM_MC_ADVANCE_RIP();
3735 IEM_MC_END();
3736 }
3737 return VINF_SUCCESS;
3738}
3739
3740
3741/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3742FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3743{
3744 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3745
3746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3747 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3748 {
3749 if (IEM_IS_MODRM_REG_MODE(bRm))
3750 {
3751 /* XMM, greg64 */
3752 IEM_MC_BEGIN(3, 4);
3753 IEM_MC_LOCAL(uint32_t, fMxcsr);
3754 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3755 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3756 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3757 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3758
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3760 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3761 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3762
3763 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3764 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3765 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3766 IEM_MC_IF_MXCSR_XCPT_PENDING()
3767 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3768 IEM_MC_ELSE()
3769 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3770 IEM_MC_ENDIF();
3771
3772 IEM_MC_ADVANCE_RIP();
3773 IEM_MC_END();
3774 }
3775 else
3776 {
3777 /* XMM, [mem64] */
3778 IEM_MC_BEGIN(3, 4);
3779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3780 IEM_MC_LOCAL(uint32_t, fMxcsr);
3781 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3782 IEM_MC_LOCAL(int64_t, i64Src);
3783 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3784 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3785 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3786
3787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3790 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3791
3792 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3793 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3794 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3795 IEM_MC_IF_MXCSR_XCPT_PENDING()
3796 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3797 IEM_MC_ELSE()
3798 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3799 IEM_MC_ENDIF();
3800
3801 IEM_MC_ADVANCE_RIP();
3802 IEM_MC_END();
3803 }
3804 }
3805 else
3806 {
3807 if (IEM_IS_MODRM_REG_MODE(bRm))
3808 {
3809 /* greg, XMM */
3810 IEM_MC_BEGIN(3, 4);
3811 IEM_MC_LOCAL(uint32_t, fMxcsr);
3812 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3813 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3814 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3815 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3816
3817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3818 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3819 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3820
3821 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3822 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3823 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3824 IEM_MC_IF_MXCSR_XCPT_PENDING()
3825 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3826 IEM_MC_ELSE()
3827 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3828 IEM_MC_ENDIF();
3829
3830 IEM_MC_ADVANCE_RIP();
3831 IEM_MC_END();
3832 }
3833 else
3834 {
3835 /* greg, [mem] */
3836 IEM_MC_BEGIN(3, 4);
3837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3838 IEM_MC_LOCAL(uint32_t, fMxcsr);
3839 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3840 IEM_MC_LOCAL(int32_t, i32Src);
3841 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3842 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3843 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3844
3845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3848 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3849
3850 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3851 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3852 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3853 IEM_MC_IF_MXCSR_XCPT_PENDING()
3854 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3855 IEM_MC_ELSE()
3856 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3857 IEM_MC_ENDIF();
3858
3859 IEM_MC_ADVANCE_RIP();
3860 IEM_MC_END();
3861 }
3862 }
3863 return VINF_SUCCESS;
3864}
3865
3866
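/*
 * The four-way split above recurs in all the scalar converts below: REX.W
 * selects the Ey operand width (int64 vs int32 source) and ModR/M.mod picks
 * register vs memory.  cvtsi2ss rounds according to MXCSR.RC, since a 32-bit
 * float only has a 24-bit significand.  A worked example, assuming the
 * default round-to-nearest-even mode:
 *
 * @code
 * // cvtsi2ss xmm0, rax      ; REX.W form
 * //   rax = 16777217 (2^24 + 1, not representable in binary32)
 * //   -> xmm0[31:0] = 16777216.0f (tie rounded to even) and MXCSR.PE = 1
 * @endcode
 */

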
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
{
    IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int64_t *, pi64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));   /* source Ey is the r/m field */
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);   /* destination Vsd is the reg field */
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);   /* destination Vsd is the reg field */
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int32_t *, pi32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));   /* source Ey is the r/m field */
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);   /* destination Vsd is the reg field */
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);   /* destination Vsd is the reg field */
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


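/*
 * cvtsi2sd with a 32-bit source is always exact, as a double's 53-bit
 * significand holds any int32; only the REX.W (int64) form can round.  For
 * example, again assuming round-to-nearest-even:
 *
 * @code
 * // cvtsi2sd xmm0, eax      ; int32 source: never sets MXCSR.PE
 * // cvtsi2sd xmm0, rax      ; int64 source:
 * //   rax = 9007199254740993 (2^53 + 1)
 * //   -> xmm0[63:0] = 9007199254740992.0 and MXCSR.PE = 1
 * @endcode
 */

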
/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse1_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

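/*
 * The movnt stores only exist in memory form because their whole point is the
 * non-temporal hint: the data goes to memory via write-combining without
 * polluting the caches.  The hint itself needs no emulation, so the code
 * above is a plain aligned 128-bit store.  Typical guest-side usage, sketched
 * with the standard SSE intrinsics (both pointers assumed 16-byte aligned and
 * cFloats a multiple of four):
 *
 * @code
 * #include <xmmintrin.h>
 * static void copyStream(float *pDst, float const *pSrc, size_t cFloats) // illustrative only
 * {
 *     for (size_t i = 0; i < cFloats; i += 4)
 *         _mm_stream_ps(&pDst[i], _mm_load_ps(&pSrc[i])); // compiles to movntps
 *     _mm_sfence();   // drain the WC buffers before dependent accesses
 * }
 * @endcode
 */
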
/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst); /* destination Ppi is the reg field, not r/m */
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst); /* destination Ppi is the reg field, not r/m */
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


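/*
 * The 'tt' variants always truncate (round toward zero), ignoring MXCSR.RC;
 * out-of-range values, NaNs and infinities produce the integer indefinite
 * value (0x80000000) and set the invalid flag.  A per-element sketch of what
 * the worker computes (not the actual iemAImpl_cvttps2pi_u128 body):
 *
 * @code
 * static int32_t iemExampleCvttR32ToI32(float r32, uint32_t *pfMxcsr) // illustrative only
 * {
 *     if (r32 >= -2147483648.0f && r32 < 2147483648.0f)
 *         return (int32_t)r32;            // C casts truncate, like cvtt*
 *     *pfMxcsr |= 1;                      // X86_MXCSR_IE
 *     return INT32_MIN;                   // integer indefinite, 0x80000000
 * }
 * @endcode
 */

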
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


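/*
 * Every form above with an MMX destination executes IEM_MC_FPU_TO_MMX_MODE(),
 * mirroring what real MMX writes do to the x87 unit.  Roughly, and assuming
 * the abridged tag-word representation kept in the FXSAVE image:
 *
 * @code
 * // FSW.TOP <- 0     (x87 stack top reset to register 0)
 * // FTW     <- 0xff  (all eight registers tagged as valid)
 * @endcode
 *
 * Note the asymmetry in cvtpi2pd further up: only its register form makes the
 * transition, as the memory form reads no MMX register.
 */

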
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
{
    IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg, [mem] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg, [mem] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst); /* destination Ppi is the reg field, not r/m */
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst); /* destination Ppi is the reg field, not r/m */
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


4607
4608/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4609FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4610{
4611 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4612
4613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4614 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4615 {
4616 if (IEM_IS_MODRM_REG_MODE(bRm))
4617 {
4618 /* greg64, XMM */
4619 IEM_MC_BEGIN(3, 4);
4620 IEM_MC_LOCAL(uint32_t, fMxcsr);
4621 IEM_MC_LOCAL(int64_t, i64Dst);
4622 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4623 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4624 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4625
4626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4627 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4628 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4629
4630 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4631 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4632 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4633 IEM_MC_IF_MXCSR_XCPT_PENDING()
4634 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4635 IEM_MC_ELSE()
4636 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4637 IEM_MC_ENDIF();
4638
4639 IEM_MC_ADVANCE_RIP();
4640 IEM_MC_END();
4641 }
4642 else
4643 {
4644 /* greg64, [mem64] */
4645 IEM_MC_BEGIN(3, 4);
4646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4647 IEM_MC_LOCAL(uint32_t, fMxcsr);
4648 IEM_MC_LOCAL(int64_t, i64Dst);
4649 IEM_MC_LOCAL(uint32_t, u32Src);
4650 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4651 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4652 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4653
4654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4656 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4657 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4658
4659 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4660 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4661 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4662 IEM_MC_IF_MXCSR_XCPT_PENDING()
4663 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4664 IEM_MC_ELSE()
4665 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4666 IEM_MC_ENDIF();
4667
4668 IEM_MC_ADVANCE_RIP();
4669 IEM_MC_END();
4670 }
4671 }
4672 else
4673 {
4674 if (IEM_IS_MODRM_REG_MODE(bRm))
4675 {
4676 /* greg, XMM */
4677 IEM_MC_BEGIN(3, 4);
4678 IEM_MC_LOCAL(uint32_t, fMxcsr);
4679 IEM_MC_LOCAL(int32_t, i32Dst);
4680 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4681 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4682 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4683
4684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4685 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4686 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4687
4688 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4689 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4690 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4691 IEM_MC_IF_MXCSR_XCPT_PENDING()
4692 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4693 IEM_MC_ELSE()
4694 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4695 IEM_MC_ENDIF();
4696
4697 IEM_MC_ADVANCE_RIP();
4698 IEM_MC_END();
4699 }
4700 else
4701 {
4702 /* greg, [mem] */
4703 IEM_MC_BEGIN(3, 4);
4704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4705 IEM_MC_LOCAL(uint32_t, fMxcsr);
4706 IEM_MC_LOCAL(int32_t, i32Dst);
4707 IEM_MC_LOCAL(uint32_t, u32Src);
4708 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4709 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4710 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4711
4712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4714 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4715 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4716
4717 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4718 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4719 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4720 IEM_MC_IF_MXCSR_XCPT_PENDING()
4721 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4722 IEM_MC_ELSE()
4723 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4724 IEM_MC_ENDIF();
4725
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 }
4729 }
4730 return VINF_SUCCESS;
4731}
4732
4733
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* greg, [mem] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING()
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_ELSE()
                IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


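/*
 * Unlike the cvtt* forms further up, cvtss2si and cvtsd2si honour MXCSR.RC.
 * A worked example for cvtsd2si with the source value 2.5:
 *
 * @code
 * // MXCSR.RC = 00b (nearest-even):  2.5 -> 2   (tie rounded to even)
 * // MXCSR.RC = 01b (down):          2.5 -> 2
 * // MXCSR.RC = 10b (up):            2.5 -> 3
 * // MXCSR.RC = 11b (toward zero):   2.5 -> 2   (same as cvttsd2si)
 * // In every inexact case MXCSR.PE is set (masked by default).
 * @endcode
 */

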
/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


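/*
 * The comparison result is reported entirely in ZF/PF/CF; OF, SF and AF are
 * cleared.  The mapping the worker implements (and which the fEFlags
 * fetch/commit dance above preserves across the no-exception path):
 *
 * @code
 * //  relation          ZF  PF  CF
 * //  unordered (NaN)    1   1   1
 * //  src1 > src2        0   0   0
 * //  src1 < src2        0   0   1
 * //  src1 == src2       1   0   0
 * @endcode
 *
 * This is why FP branches test combinations like ja (CF=0 and ZF=0) rather
 * than jg, which would involve SF/OF.
 */

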
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */


/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING()
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_ELSE()
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


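/*
 * comiss/comisd differ from their ucomis* twins only in NaN handling: the
 * ordered compares signal \#IA for any NaN source (QNaN or SNaN), while the
 * unordered forms signal it for SNaN only.  With \#IA masked, both produce
 * the unordered flag pattern ZF=PF=CF=1.  For example:
 *
 * @code
 * //  comiss  xmm0, xmm1   ; xmm1 = QNaN -> MXCSR.IE set, unordered result
 * //  ucomiss xmm0, xmm1   ; xmm1 = QNaN -> no IE, unordered result
 * @endcode
 */

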
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}


/** Opcode 0x0f 0x34. */
FNIEMOP_DEF(iemOp_sysenter)
{
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
}

/** Opcode 0x0f 0x35. */
FNIEMOP_DEF(iemOp_sysexit)
{
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}

/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);


/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)



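/*
 * What a CMOV_X instantiation boils down to for the register form, sketched
 * in plain C for cmove (ZF set); note the 32-bit quirk: in 64-bit mode the
 * upper half of the destination is zeroed even when the condition is false:
 *
 * @code
 * static void iemExampleCmove32(uint64_t *pauGregs, uint8_t iDst, uint8_t iSrc,
 *                               uint32_t fEFlags) // illustrative only
 * {
 *     if (fEFlags & X86_EFL_ZF)
 *         pauGregs[iDst] = (uint32_t)pauGregs[iSrc]; // move + zero high half
 *     else
 *         pauGregs[iDst] = (uint32_t)pauGregs[iDst]; // no move, still clears high half
 * }
 * @endcode
 *
 * The memory forms perform the load unconditionally, so a faulting address
 * faults regardless of the condition; only the register write is conditional.
 */

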
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X

5461/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5462FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5463{
5464 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5466 if (IEM_IS_MODRM_REG_MODE(bRm))
5467 {
5468 /*
5469 * Register, register.
5470 */
5471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5472 IEM_MC_BEGIN(2, 1);
5473 IEM_MC_LOCAL(uint8_t, u8Dst);
5474 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5475 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5476 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5477 IEM_MC_PREPARE_SSE_USAGE();
5478 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5479 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5480 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5481 IEM_MC_ADVANCE_RIP();
5482 IEM_MC_END();
5483 return VINF_SUCCESS;
5484 }
5485
5486 /* No memory operand. */
5487 return IEMOP_RAISE_INVALID_OPCODE();
5488}
5489
5490
5491/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5492FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5493{
5494 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5496 if (IEM_IS_MODRM_REG_MODE(bRm))
5497 {
5498 /*
5499 * Register, register.
5500 */
5501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5502 IEM_MC_BEGIN(2, 1);
5503 IEM_MC_LOCAL(uint8_t, u8Dst);
5504 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5505 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5506 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5507 IEM_MC_PREPARE_SSE_USAGE();
5508 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5509 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5510 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5511 IEM_MC_ADVANCE_RIP();
5512 IEM_MC_END();
5513 return VINF_SUCCESS;
5514 }
5515
5516 /* No memory operand. */
5517 return IEMOP_RAISE_INVALID_OPCODE();
5519}
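
/*
 * Both MOVMSK forms above simply gather the sign bits of the packed source
 * elements into the low bits of a general purpose register. A minimal sketch
 * of the single precision case, with illustrative names rather than the
 * actual iemAImpl_movmskps_u128 body:
 *
 *     uint8_t u8Mask = 0;
 *     for (unsigned i = 0; i < 4; i++)
 *         u8Mask |= (uint8_t)(puSrc->au32[i] >> 31) << i; // bit i = sign of lane i
 *     *pu8Dst = u8Mask;
 *
 * movmskpd does the same with the two sign bits of the packed doubles.
 */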
5520
5521
5522/* Opcode 0xf3 0x0f 0x50 - invalid */
5523/* Opcode 0xf2 0x0f 0x50 - invalid */
5524
5525
5526/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5527FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5528{
5529 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5530 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5531}
5532
5533
5534/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5535FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5536{
5537 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5538 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5539}
5540
5541
5542/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5543FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5544{
5545 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5546 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5547}
5548
5549
5550/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5551FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5552{
5553 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5554 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5555}
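
/*
 * Worker naming note for the arithmetic forms above and below: the
 * FullFull_To_Full helpers operate on every packed element, while the
 * FullR32_To_Full / FullR64_To_Full ones combine only the low single/double
 * with the destination and leave the upper destination elements untouched,
 * matching the scalar ss/sd instruction semantics.
 */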
5556
5557
5558/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5559FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
5560/* Opcode 0x66 0x0f 0x52 - invalid */
5561/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5562FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
5563/* Opcode 0xf2 0x0f 0x52 - invalid */
5564
5565/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5566FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5567/* Opcode 0x66 0x0f 0x53 - invalid */
5568/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5569FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5570/* Opcode 0xf2 0x0f 0x53 - invalid */
5571
5572
5573/** Opcode 0x0f 0x54 - andps Vps, Wps */
5574FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5575{
5576 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5577 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5578}
5579
5580
5581/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5582FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5583{
5584 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5585 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5586}
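
/*
 * Note that andps and andpd (like the andn/or/xor pairs below) perform the
 * same bitwise operation on all 128 bits, which is why they can share the
 * integer iemAImpl_pand_u128 worker; architecturally only the encodings and
 * CPUID requirements differ.
 */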
5587
5588
5589/* Opcode 0xf3 0x0f 0x54 - invalid */
5590/* Opcode 0xf2 0x0f 0x54 - invalid */
5591
5592
5593/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5594FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5595{
5596 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5597 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5598}
5599
5600
5601/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5602FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5603{
5604 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5605 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5606}
5607
5608
5609/* Opcode 0xf3 0x0f 0x55 - invalid */
5610/* Opcode 0xf2 0x0f 0x55 - invalid */
5611
5612
5613/** Opcode 0x0f 0x56 - orps Vps, Wps */
5614FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5615{
5616 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5617 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5618}
5619
5620
5621/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5622FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5623{
5624 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5625 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5626}
5627
5628
5629/* Opcode 0xf3 0x0f 0x56 - invalid */
5630/* Opcode 0xf2 0x0f 0x56 - invalid */
5631
5632
5633/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5634FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5635{
5636 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5637 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5638}
5639
5640
5641/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5642FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5643{
5644 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5645 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5646}
5647
5648
5649/* Opcode 0xf3 0x0f 0x57 - invalid */
5650/* Opcode 0xf2 0x0f 0x57 - invalid */
5651
5652/** Opcode 0x0f 0x58 - addps Vps, Wps */
5653FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5654{
5655 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5656 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5657}
5658
5659
5660/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5661FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5662{
5663 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5664 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5665}
5666
5667
5668/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5669FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5670{
5671 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5672 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5673}
5674
5675
5676/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5677FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5678{
5679 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5680 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5681}
5682
5683
5684/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5685FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5686{
5687 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5688 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5689}
5690
5691
5692/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5693FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5694{
5695 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5696 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5697}
5698
5699
5700/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5701FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5702{
5703 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5704 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5705}
5706
5707
5708/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5709FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5710{
5711 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5712 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5713}
5714
5715
5716/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5717FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5718{
5719 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5720 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5721}
5722
5723
5724/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5725FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5726{
5727 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5728 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5729}
5730
5731
5732/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5733FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5734{
5735 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5736 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5737}
5738
5739
5740/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5741FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5742{
5743 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5744 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5745}
5746
5747
5748/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5749FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5750{
5751 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5752 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5753}
5754
5755
5756/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5757FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5758{
5759 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5760 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5761}
5762
5763
5764/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5765FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5766{
5767 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5768 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5769}
5770
5771
5772/* Opcode 0xf2 0x0f 0x5b - invalid */
5773
5774
5775/** Opcode 0x0f 0x5c - subps Vps, Wps */
5776FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5777{
5778 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5779 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5780}
5781
5782
5783/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5784FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5785{
5786 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5787 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5788}
5789
5790
5791/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5792FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5793{
5794 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5795 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5796}
5797
5798
5799/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5800FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5801{
5802 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5803 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5804}
5805
5806
5807/** Opcode 0x0f 0x5d - minps Vps, Wps */
5808FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5809{
5810 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5811 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5812}
5813
5814
5815/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5816FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5817{
5818 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5819 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5820}
5821
5822
5823/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5824FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5825{
5826 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5827 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5828}
5829
5830
5831/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5832FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5833{
5834 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5835 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5836}
5837
5838
5839/** Opcode 0x0f 0x5e - divps Vps, Wps */
5840FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5841{
5842 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5843 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5844}
5845
5846
5847/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5848FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5849{
5850 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5851 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5852}
5853
5854
5855/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5856FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5857{
5858 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5859 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5860}
5861
5862
5863/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5864FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5865{
5866 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5867 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5868}
5869
5870
5871/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5872FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5873{
5874 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5875 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5876}
5877
5878
5879/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5880FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5881{
5882 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5883 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5884}
5885
5886
5887/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5888FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5889{
5890 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5891 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5892}
5893
5894
5895/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5896FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5897{
5898 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5899 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5900}
5901
5902
5903/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5904FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5905{
5906 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5907 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5908}
5909
5910
5911/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5912FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5913{
5914 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5915 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5916}
5917
5918
5919/* Opcode 0xf3 0x0f 0x60 - invalid */
5920
5921
5922/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5923FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5924{
5925    /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
5926 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5927 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5928}
5929
5930
5931/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5932FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5933{
5934 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5935 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5936}
5937
5938
5939/* Opcode 0xf3 0x0f 0x61 - invalid */
5940
5941
5942/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5943FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5944{
5945 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5946 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5947}
5948
5949
5950/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5951FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5952{
5953 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5954 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5955}
5956
5957
5958/* Opcode 0xf3 0x0f 0x62 - invalid */
5959
5960
5961
5962/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5963FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5964{
5965 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5966 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5967}
5968
5969
5970/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5971FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5972{
5973 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5974 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5975}
5976
5977
5978/* Opcode 0xf3 0x0f 0x63 - invalid */
5979
5980
5981/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5982FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5983{
5984 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5985 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5986}
5987
5988
5989/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5990FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5991{
5992 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5993 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5994}
5995
5996
5997/* Opcode 0xf3 0x0f 0x64 - invalid */
5998
5999
6000/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6001FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6002{
6003 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6004 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6005}
6006
6007
6008/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6009FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6010{
6011 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6012 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6013}
6014
6015
6016/* Opcode 0xf3 0x0f 0x65 - invalid */
6017
6018
6019/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6020FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6021{
6022 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6023 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6024}
6025
6026
6027/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6028FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6029{
6030 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6031 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6032}
6033
6034
6035/* Opcode 0xf3 0x0f 0x66 - invalid */
6036
6037
6038/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6039FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6040{
6041 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6042 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6043}
6044
6045
6046/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6047FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6048{
6049 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6050 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6051}
6052
6053
6054/* Opcode 0xf3 0x0f 0x67 - invalid */
6055
6056
6057/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6058 * @note Intel and AMD both use Qd for the second parameter, however they
6059 *       both list it as a mmX/mem64 operand and Intel describes it as being
6060 *       loaded as a qword, so it should be Qq, shouldn't it? */
6061FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6062{
6063 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6064 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6065}
6066
6067
6068/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6069FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6070{
6071 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6072 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6073}
6074
6075
6076/* Opcode 0xf3 0x0f 0x68 - invalid */
6077
6078
6079/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6080 * @note Intel and AMD both use Qd for the second parameter, however they
6081 *       both list it as a mmX/mem64 operand and Intel describes it as being
6082 *       loaded as a qword, so it should be Qq, shouldn't it? */
6083FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6084{
6085 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6086 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6087}
6088
6089
6090/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6091FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6092{
6093 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6094 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6096}
6097
6098
6099/* Opcode 0xf3 0x0f 0x69 - invalid */
6100
6101
6102/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6103 * @note Intel and AMD both use Qd for the second parameter, however they
6104 *       both list it as a mmX/mem64 operand and Intel describes it as being
6105 *       loaded as a qword, so it should be Qq, shouldn't it? */
6106FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6107{
6108 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6109 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6110}
6111
6112
6113/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6114FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6115{
6116 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6117 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6118}
6119
6120
6121/* Opcode 0xf3 0x0f 0x6a - invalid */
6122
6123
6124/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6125FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6126{
6127 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6128 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6129}
6130
6131
6132/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6133FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6134{
6135 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6136 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6137}
6138
6139
6140/* Opcode 0xf3 0x0f 0x6b - invalid */
6141
6142
6143/* Opcode 0x0f 0x6c - invalid */
6144
6145
6146/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6147FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6148{
6149 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6150 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6151}
6152
6153
6154/* Opcode 0xf3 0x0f 0x6c - invalid */
6155/* Opcode 0xf2 0x0f 0x6c - invalid */
6156
6157
6158/* Opcode 0x0f 0x6d - invalid */
6159
6160
6161/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6162FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6163{
6164 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6165 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6166}
6167
6168
6169/* Opcode 0xf3 0x0f 0x6d - invalid */
6170
6171
6172FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6173{
6174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6175 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6176 {
6177 /**
6178 * @opcode 0x6e
6179 * @opcodesub rex.w=1
6180 * @oppfx none
6181 * @opcpuid mmx
6182 * @opgroup og_mmx_datamove
6183 * @opxcpttype 5
6184 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6185 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6186 */
6187 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6188 if (IEM_IS_MODRM_REG_MODE(bRm))
6189 {
6190 /* MMX, greg64 */
6191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6192 IEM_MC_BEGIN(0, 1);
6193 IEM_MC_LOCAL(uint64_t, u64Tmp);
6194
6195 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6196 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6197 IEM_MC_FPU_TO_MMX_MODE();
6198
6199 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6200 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6201
6202 IEM_MC_ADVANCE_RIP();
6203 IEM_MC_END();
6204 }
6205 else
6206 {
6207 /* MMX, [mem64] */
6208 IEM_MC_BEGIN(0, 2);
6209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6210 IEM_MC_LOCAL(uint64_t, u64Tmp);
6211
6212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6214 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6215 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6216 IEM_MC_FPU_TO_MMX_MODE();
6217
6218 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6219 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6220
6221 IEM_MC_ADVANCE_RIP();
6222 IEM_MC_END();
6223 }
6224 }
6225 else
6226 {
6227 /**
6228 * @opdone
6229 * @opcode 0x6e
6230 * @opcodesub rex.w=0
6231 * @oppfx none
6232 * @opcpuid mmx
6233 * @opgroup og_mmx_datamove
6234 * @opxcpttype 5
6235 * @opfunction iemOp_movd_q_Pd_Ey
6236 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6237 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6238 */
6239 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6240 if (IEM_IS_MODRM_REG_MODE(bRm))
6241 {
6242 /* MMX, greg */
6243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6244 IEM_MC_BEGIN(0, 1);
6245 IEM_MC_LOCAL(uint64_t, u64Tmp);
6246
6247 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6248 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6249 IEM_MC_FPU_TO_MMX_MODE();
6250
6251 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6252 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6253
6254 IEM_MC_ADVANCE_RIP();
6255 IEM_MC_END();
6256 }
6257 else
6258 {
6259 /* MMX, [mem] */
6260 IEM_MC_BEGIN(0, 2);
6261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6262 IEM_MC_LOCAL(uint32_t, u32Tmp);
6263
6264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6266 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6267 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6268 IEM_MC_FPU_TO_MMX_MODE();
6269
6270 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6271 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6272
6273 IEM_MC_ADVANCE_RIP();
6274 IEM_MC_END();
6275 }
6276 }
6277 return VINF_SUCCESS;
6278}
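
/*
 * Decode summary for 0x0f 0x6e above: with REX.W the full 64-bit GPR or
 * qword is loaded (movq), without it the 32-bit source is zero extended into
 * the 64-bit MMX register (movd, see IEM_MC_STORE_MREG_U32_ZX_U64). The SSE2
 * form below behaves the same but zero extends through bit 127 of the XMM
 * register.
 */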
6279
6280FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6281{
6282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6283 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6284 {
6285 /**
6286 * @opcode 0x6e
6287 * @opcodesub rex.w=1
6288 * @oppfx 0x66
6289 * @opcpuid sse2
6290 * @opgroup og_sse2_simdint_datamove
6291 * @opxcpttype 5
6292 * @optest 64-bit / op1=1 op2=2 -> op1=2
6293 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6294 */
6295 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6296 if (IEM_IS_MODRM_REG_MODE(bRm))
6297 {
6298 /* XMM, greg64 */
6299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6300 IEM_MC_BEGIN(0, 1);
6301 IEM_MC_LOCAL(uint64_t, u64Tmp);
6302
6303 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6304 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6305
6306 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6307 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6308
6309 IEM_MC_ADVANCE_RIP();
6310 IEM_MC_END();
6311 }
6312 else
6313 {
6314 /* XMM, [mem64] */
6315 IEM_MC_BEGIN(0, 2);
6316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6317 IEM_MC_LOCAL(uint64_t, u64Tmp);
6318
6319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6321 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6322 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6323
6324 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6325 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6326
6327 IEM_MC_ADVANCE_RIP();
6328 IEM_MC_END();
6329 }
6330 }
6331 else
6332 {
6333 /**
6334 * @opdone
6335 * @opcode 0x6e
6336 * @opcodesub rex.w=0
6337 * @oppfx 0x66
6338 * @opcpuid sse2
6339 * @opgroup og_sse2_simdint_datamove
6340 * @opxcpttype 5
6341 * @opfunction iemOp_movd_q_Vy_Ey
6342 * @optest op1=1 op2=2 -> op1=2
6343 * @optest op1=0 op2=-42 -> op1=-42
6344 */
6345 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6346 if (IEM_IS_MODRM_REG_MODE(bRm))
6347 {
6348 /* XMM, greg32 */
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6350 IEM_MC_BEGIN(0, 1);
6351 IEM_MC_LOCAL(uint32_t, u32Tmp);
6352
6353 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6354 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6355
6356 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6357 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6358
6359 IEM_MC_ADVANCE_RIP();
6360 IEM_MC_END();
6361 }
6362 else
6363 {
6364 /* XMM, [mem32] */
6365 IEM_MC_BEGIN(0, 2);
6366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6367 IEM_MC_LOCAL(uint32_t, u32Tmp);
6368
6369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6371 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6372 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6373
6374 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6375 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6376
6377 IEM_MC_ADVANCE_RIP();
6378 IEM_MC_END();
6379 }
6380 }
6381 return VINF_SUCCESS;
6382}
6383
6384/* Opcode 0xf3 0x0f 0x6e - invalid */
6385
6386
6387/**
6388 * @opcode 0x6f
6389 * @oppfx none
6390 * @opcpuid mmx
6391 * @opgroup og_mmx_datamove
6392 * @opxcpttype 5
6393 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6394 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6395 */
6396FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6397{
6398    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6400 if (IEM_IS_MODRM_REG_MODE(bRm))
6401 {
6402 /*
6403 * Register, register.
6404 */
6405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6406 IEM_MC_BEGIN(0, 1);
6407 IEM_MC_LOCAL(uint64_t, u64Tmp);
6408
6409 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6410 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6411 IEM_MC_FPU_TO_MMX_MODE();
6412
6413 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6414 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6415
6416 IEM_MC_ADVANCE_RIP();
6417 IEM_MC_END();
6418 }
6419 else
6420 {
6421 /*
6422 * Register, memory.
6423 */
6424 IEM_MC_BEGIN(0, 2);
6425 IEM_MC_LOCAL(uint64_t, u64Tmp);
6426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6427
6428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6430 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6431 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6432 IEM_MC_FPU_TO_MMX_MODE();
6433
6434 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6435 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6436
6437 IEM_MC_ADVANCE_RIP();
6438 IEM_MC_END();
6439 }
6440 return VINF_SUCCESS;
6441}
6442
6443/**
6444 * @opcode 0x6f
6445 * @oppfx 0x66
6446 * @opcpuid sse2
6447 * @opgroup og_sse2_simdint_datamove
6448 * @opxcpttype 1
6449 * @optest op1=1 op2=2 -> op1=2
6450 * @optest op1=0 op2=-42 -> op1=-42
6451 */
6452FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6453{
6454 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6456 if (IEM_IS_MODRM_REG_MODE(bRm))
6457 {
6458 /*
6459 * Register, register.
6460 */
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 IEM_MC_BEGIN(0, 0);
6463
6464 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6466
6467 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6468 IEM_GET_MODRM_RM(pVCpu, bRm));
6469 IEM_MC_ADVANCE_RIP();
6470 IEM_MC_END();
6471 }
6472 else
6473 {
6474 /*
6475 * Register, memory.
6476 */
6477 IEM_MC_BEGIN(0, 2);
6478 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6480
6481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6483 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6485
6486 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6487 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6488
6489 IEM_MC_ADVANCE_RIP();
6490 IEM_MC_END();
6491 }
6492 return VINF_SUCCESS;
6493}
6494
6495/**
6496 * @opcode 0x6f
6497 * @oppfx 0xf3
6498 * @opcpuid sse2
6499 * @opgroup og_sse2_simdint_datamove
6500 * @opxcpttype 4UA
6501 * @optest op1=1 op2=2 -> op1=2
6502 * @optest op1=0 op2=-42 -> op1=-42
6503 */
6504FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6505{
6506 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6508 if (IEM_IS_MODRM_REG_MODE(bRm))
6509 {
6510 /*
6511 * Register, register.
6512 */
6513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6514 IEM_MC_BEGIN(0, 0);
6515 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6517 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6518 IEM_GET_MODRM_RM(pVCpu, bRm));
6519 IEM_MC_ADVANCE_RIP();
6520 IEM_MC_END();
6521 }
6522 else
6523 {
6524 /*
6525 * Register, memory.
6526 */
6527 IEM_MC_BEGIN(0, 2);
6528 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6530
6531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6535 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6536 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6537
6538 IEM_MC_ADVANCE_RIP();
6539 IEM_MC_END();
6540 }
6541 return VINF_SUCCESS;
6542}
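
/*
 * The only difference between the movdqa and movdqu memory paths above is
 * the fetch: IEM_MC_FETCH_MEM_U128_ALIGN_SSE raises #GP(0) for a misaligned
 * 16 byte operand, whereas the plain IEM_MC_FETCH_MEM_U128 used by movdqu
 * accepts any alignment.
 */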
6543
6544
6545/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6546FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6547{
6548 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6550 if (IEM_IS_MODRM_REG_MODE(bRm))
6551 {
6552 /*
6553 * Register, register.
6554 */
6555 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6557
6558 IEM_MC_BEGIN(3, 0);
6559 IEM_MC_ARG(uint64_t *, pDst, 0);
6560 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6561 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6562 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6563 IEM_MC_PREPARE_FPU_USAGE();
6564 IEM_MC_FPU_TO_MMX_MODE();
6565
6566 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6567 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6568 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6569 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6570
6571 IEM_MC_ADVANCE_RIP();
6572 IEM_MC_END();
6573 }
6574 else
6575 {
6576 /*
6577 * Register, memory.
6578 */
6579 IEM_MC_BEGIN(3, 2);
6580 IEM_MC_ARG(uint64_t *, pDst, 0);
6581 IEM_MC_LOCAL(uint64_t, uSrc);
6582 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6584
6585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6586 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6587 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6589 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6590 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6591
6592 IEM_MC_PREPARE_FPU_USAGE();
6593 IEM_MC_FPU_TO_MMX_MODE();
6594
6595 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6596 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6597 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6598
6599 IEM_MC_ADVANCE_RIP();
6600 IEM_MC_END();
6601 }
6602 return VINF_SUCCESS;
6603}
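
/*
 * The pshufw immediate encodes one two-bit source word selector per
 * destination word. A minimal sketch of the shuffle itself, with uSrc/uDst
 * as illustrative locals rather than the actual iemAImpl_pshufw_u64 body:
 *
 *     uint64_t uDst = 0;
 *     for (unsigned i = 0; i < 4; i++)
 *     {
 *         unsigned const iSel = (bEvil >> (i * 2)) & 3;  // source word to pick
 *         uDst |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (i * 16);
 *     }
 */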
6604
6605
6606/**
6607 * Common worker for SSE2 instructions on the forms:
6608 * pshufd xmm1, xmm2/mem128, imm8
6609 * pshufhw xmm1, xmm2/mem128, imm8
6610 * pshuflw xmm1, xmm2/mem128, imm8
6611 *
6612 * Proper alignment of the 128-bit operand is enforced.
6613 * Exceptions type 4. SSE2 cpuid checks.
6614 */
6615FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6616{
6617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6618 if (IEM_IS_MODRM_REG_MODE(bRm))
6619 {
6620 /*
6621 * Register, register.
6622 */
6623 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6625
6626 IEM_MC_BEGIN(3, 0);
6627 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6628 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6629 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6630 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6631 IEM_MC_PREPARE_SSE_USAGE();
6632 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6633 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6634 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6635 IEM_MC_ADVANCE_RIP();
6636 IEM_MC_END();
6637 }
6638 else
6639 {
6640 /*
6641 * Register, memory.
6642 */
6643 IEM_MC_BEGIN(3, 2);
6644 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6645 IEM_MC_LOCAL(RTUINT128U, uSrc);
6646 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6648
6649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6650 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6651 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6653 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6654
6655 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6656 IEM_MC_PREPARE_SSE_USAGE();
6657 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6658 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6659
6660 IEM_MC_ADVANCE_RIP();
6661 IEM_MC_END();
6662 }
6663 return VINF_SUCCESS;
6664}
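
/*
 * Worked example for the workers above: pshufd xmm0, xmm1, 0x1b (selectors
 * 11b, 10b, 01b, 00b for destination dwords 0 through 3) reverses the four
 * dwords, since destination dword i is taken from source dword
 * (bImm >> (i * 2)) & 3. pshuflw/pshufhw apply the same two-bit selectors to
 * the low/high four words only and copy the other half through unchanged.
 */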
6665
6666
6667/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6668FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6669{
6670 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6671 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6672}
6673
6674
6675/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6676FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6677{
6678 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6679 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6680}
6681
6682
6683/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6684FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6685{
6686 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6687 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6688}
6689
6690
6691/**
6692 * Common worker for MMX instructions of the form:
6693 * psrlw mm, imm8
6694 * psraw mm, imm8
6695 * psllw mm, imm8
6696 * psrld mm, imm8
6697 * psrad mm, imm8
6698 * pslld mm, imm8
6699 * psrlq mm, imm8
6700 * psllq mm, imm8
6701 *
6702 */
6703FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6704{
6705 if (IEM_IS_MODRM_REG_MODE(bRm))
6706 {
6707 /*
6708 * Register, immediate.
6709 */
6710 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6712
6713 IEM_MC_BEGIN(2, 0);
6714 IEM_MC_ARG(uint64_t *, pDst, 0);
6715 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6716 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6717 IEM_MC_PREPARE_FPU_USAGE();
6718 IEM_MC_FPU_TO_MMX_MODE();
6719
6720 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6721 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6722 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6723
6724 IEM_MC_ADVANCE_RIP();
6725 IEM_MC_END();
6726 }
6727 else
6728 {
6729 /*
6730 * Register, memory not supported.
6731 */
6732 /// @todo Caller already enforced register mode?!
6733 }
6734 return VINF_SUCCESS;
6735}
6736
6737
6738/**
6739 * Common worker for SSE2 instructions of the form:
6740 * psrlw xmm, imm8
6741 * psraw xmm, imm8
6742 * psllw xmm, imm8
6743 * psrld xmm, imm8
6744 * psrad xmm, imm8
6745 * pslld xmm, imm8
6746 * psrlq xmm, imm8
6747 * psllq xmm, imm8
6748 *
6749 */
6750FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6751{
6752 if (IEM_IS_MODRM_REG_MODE(bRm))
6753 {
6754 /*
6755 * Register, immediate.
6756 */
6757 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6759
6760 IEM_MC_BEGIN(2, 0);
6761 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6762 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6763 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6764 IEM_MC_PREPARE_SSE_USAGE();
6765 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6766 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6767 IEM_MC_ADVANCE_RIP();
6768 IEM_MC_END();
6769 }
6770 else
6771 {
6772 /*
6773         * Register, memory not supported.
6774 */
6775 /// @todo Caller already enforced register mode?!
6776 }
6777 return VINF_SUCCESS;
6778}
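
/*
 * Behaviour note for the shift-by-immediate workers above: a count at or
 * beyond the element width zeroes the element for the logical shifts, while
 * psraw/psrad fill it with copies of the sign bit; psrldq/pslldq (group 14)
 * instead treat the immediate as a byte count across the whole 128-bit
 * register.
 */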
6779
6780
6781/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6782FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6783{
6784// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6785 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6786}
6787
6788
6789/** Opcode 0x66 0x0f 0x71 11/2. */
6790FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6791{
6792// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6793 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6794}
6795
6796
6797/** Opcode 0x0f 0x71 11/4. */
6798FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6799{
6800// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6801 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6802}
6803
6804
6805/** Opcode 0x66 0x0f 0x71 11/4. */
6806FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6807{
6808// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6809 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6810}
6811
6812
6813/** Opcode 0x0f 0x71 11/6. */
6814FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6815{
6816// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6817 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6818}
6819
6820
6821/** Opcode 0x66 0x0f 0x71 11/6. */
6822FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6823{
6824// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6825 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6826}
6827
6828
6829/**
6830 * Group 12 jump table for register variant.
6831 */
6832IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6833{
6834 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6835 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6836 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6837 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6838 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6839 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6840 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6841 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6842};
6843AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
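
/*
 * The table is indexed as /r * 4 + pVCpu->iem.s.idxPrefix, i.e. four prefix
 * columns per ModR/M reg value in the order none, 0x66, 0xf3, 0xf2 (matching
 * the row layout above); groups 13 and 14 below use the same scheme.
 */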
6844
6845
6846/** Opcode 0x0f 0x71. */
6847FNIEMOP_DEF(iemOp_Grp12)
6848{
6849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6850 if (IEM_IS_MODRM_REG_MODE(bRm))
6851 /* register, register */
6852 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6853 + pVCpu->iem.s.idxPrefix], bRm);
6854 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6855}
6856
6857
6858/** Opcode 0x0f 0x72 11/2. */
6859FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6860{
6861// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6862 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6863}
6864
6865
6866/** Opcode 0x66 0x0f 0x72 11/2. */
6867FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6868{
6869// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6870 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6871}
6872
6873
6874/** Opcode 0x0f 0x72 11/4. */
6875FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6876{
6877// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6878 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6879}
6880
6881
6882/** Opcode 0x66 0x0f 0x72 11/4. */
6883FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6884{
6885// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6886 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6887}
6888
6889
6890/** Opcode 0x0f 0x72 11/6. */
6891FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6892{
6893// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6894 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6895}
6896
6897/** Opcode 0x66 0x0f 0x72 11/6. */
6898FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6899{
6900// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6901 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6902}
6903
6904
6905/**
6906 * Group 13 jump table for register variant.
6907 */
6908IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6909{
6910 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6911 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6912 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6913 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6914 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6915 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6916 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6917 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6918};
6919AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6920
6921/** Opcode 0x0f 0x72. */
6922FNIEMOP_DEF(iemOp_Grp13)
6923{
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925 if (IEM_IS_MODRM_REG_MODE(bRm))
6926 /* register, register */
6927 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6928 + pVCpu->iem.s.idxPrefix], bRm);
6929 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6930}
6931
6932
6933/** Opcode 0x0f 0x73 11/2. */
6934FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6935{
6936// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6937 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6938}
6939
6940
6941/** Opcode 0x66 0x0f 0x73 11/2. */
6942FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6943{
6944// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6945 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6946}
6947
6948
6949/** Opcode 0x66 0x0f 0x73 11/3. */
6950FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6951{
6952// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6953 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6954}
6955
6956
6957/** Opcode 0x0f 0x73 11/6. */
6958FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6959{
6960// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6961 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6962}
6963
6964
6965/** Opcode 0x66 0x0f 0x73 11/6. */
6966FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6967{
6968// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6969 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6970}
6971
6972
6973/** Opcode 0x66 0x0f 0x73 11/7. */
6974FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6975{
6976// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6977 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6978}
6979
6980/**
6981 * Group 14 jump table for register variant.
6982 */
6983IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6984{
6985 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6986 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6987 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6988 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6989 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6990 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6991 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6992 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6993};
6994AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6995
6996
6997/** Opcode 0x0f 0x73. */
6998FNIEMOP_DEF(iemOp_Grp14)
6999{
7000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7001 if (IEM_IS_MODRM_REG_MODE(bRm))
7002 /* register, register */
7003 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7004 + pVCpu->iem.s.idxPrefix], bRm);
7005 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7006}
7007
7008
7009/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7010FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7011{
7012 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7013 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7014}
7015
7016
7017/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7018FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7019{
7020 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7021 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7022}
7023
7024
7025/* Opcode 0xf3 0x0f 0x74 - invalid */
7026/* Opcode 0xf2 0x0f 0x74 - invalid */
7027
7028
7029/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7030FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7031{
7032 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7033 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7034}
7035
7036
7037/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7038FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7039{
7040 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7041 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7042}
7043
7044
7045/* Opcode 0xf3 0x0f 0x75 - invalid */
7046/* Opcode 0xf2 0x0f 0x75 - invalid */
7047
7048
7049/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7050FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7051{
7052 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7053 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7054}
7055
7056
7057/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7058FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7059{
7060 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7061 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7062}
7063
7064
7065/* Opcode 0xf3 0x0f 0x76 - invalid */
7066/* Opcode 0xf2 0x0f 0x76 - invalid */
7067
7068
7069/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7070FNIEMOP_DEF(iemOp_emms)
7071{
7072 IEMOP_MNEMONIC(emms, "emms");
7073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7074
7075    IEM_MC_BEGIN(0, 0);
7076 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7077 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7078 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7079 IEM_MC_FPU_FROM_MMX_MODE();
7080 IEM_MC_ADVANCE_RIP();
7081 IEM_MC_END();
7082 return VINF_SUCCESS;
7083}
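
/*
 * emms leaves MMX mode by marking all eight x87 registers empty again (see
 * IEM_MC_FPU_FROM_MMX_MODE above), so subsequent x87 code starts with a
 * clean register stack; the IEM_MC_MAYBE_RAISE_* checks cover the usual #NM
 * and pending-#MF cases.
 */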
7084
7085/* Opcode 0x66 0x0f 0x77 - invalid */
7086/* Opcode 0xf3 0x0f 0x77 - invalid */
7087/* Opcode 0xf2 0x0f 0x77 - invalid */
7088
7089/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7090#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7091FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7092{
7093 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7094 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7095 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7096 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7097
7098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7099 if (IEM_IS_MODRM_REG_MODE(bRm))
7100 {
7101 /*
7102 * Register, register.
7103 */
7104 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7105 if (enmEffOpSize == IEMMODE_64BIT)
7106 {
7107 IEM_MC_BEGIN(2, 0);
7108 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7109 IEM_MC_ARG(uint64_t, u64Enc, 1);
7110 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7111 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7112 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7113 IEM_MC_END();
7114 }
7115 else
7116 {
7117 IEM_MC_BEGIN(2, 0);
7118 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7119 IEM_MC_ARG(uint32_t, u32Enc, 1);
7120 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7121 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7122 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
7123 IEM_MC_END();
7124 }
7125 }
7126 else
7127 {
7128 /*
7129 * Memory, register.
7130 */
7131 if (enmEffOpSize == IEMMODE_64BIT)
7132 {
7133 IEM_MC_BEGIN(3, 0);
7134 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7135 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7136 IEM_MC_ARG(uint64_t, u64Enc, 2);
7137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7138 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7139 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7140 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7141 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7142 IEM_MC_END();
7143 }
7144 else
7145 {
7146 IEM_MC_BEGIN(3, 0);
7147 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7148 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7149 IEM_MC_ARG(uint32_t, u32Enc, 2);
7150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7151 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7152 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7153 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7154 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7155 IEM_MC_END();
7156 }
7157 }
7158 return VINF_SUCCESS;
7159}
7160#else
7161FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7162#endif
7163
7164/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7165FNIEMOP_STUB(iemOp_AmdGrp17);
7166/* Opcode 0xf3 0x0f 0x78 - invalid */
7167/* Opcode 0xf2 0x0f 0x78 - invalid */
7168
7169/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7170#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7171FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7172{
7173 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7174 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7175 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7176 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7177
7178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7179 if (IEM_IS_MODRM_REG_MODE(bRm))
7180 {
7181 /*
7182 * Register, register.
7183 */
7184 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7185 if (enmEffOpSize == IEMMODE_64BIT)
7186 {
7187 IEM_MC_BEGIN(2, 0);
7188 IEM_MC_ARG(uint64_t, u64Val, 0);
7189 IEM_MC_ARG(uint64_t, u64Enc, 1);
7190 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7191 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7192 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
7193 IEM_MC_END();
7194 }
7195 else
7196 {
7197 IEM_MC_BEGIN(2, 0);
7198 IEM_MC_ARG(uint32_t, u32Val, 0);
7199 IEM_MC_ARG(uint32_t, u32Enc, 1);
7200 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7201 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7202 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
7203 IEM_MC_END();
7204 }
7205 }
7206 else
7207 {
7208 /*
7209 * Register, memory.
7210 */
7211 if (enmEffOpSize == IEMMODE_64BIT)
7212 {
7213 IEM_MC_BEGIN(3, 0);
7214 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7215 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7216 IEM_MC_ARG(uint64_t, u64Enc, 2);
7217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7218 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7219 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7220 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7221 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7222 IEM_MC_END();
7223 }
7224 else
7225 {
7226 IEM_MC_BEGIN(3, 0);
7227 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7228 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7229 IEM_MC_ARG(uint32_t, u32Enc, 2);
7230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7231 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7232 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7233 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7234 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7235 IEM_MC_END();
7236 }
7237 }
7238 return VINF_SUCCESS;
7239}
7240#else
7241FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7242#endif
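
/*
 * VMWRITE goes the other way: the r/m operand (Ey) supplies the value and
 * the register operand (Gy) the field encoding, so the memory form above is
 * a load.  Guest-side sketch (illustrative, same assumptions as the vmread
 * example above):
 *
 *      __asm__ __volatile__("vmwrite %0, %1" : : "rm" (uValue), "r" (uEnc) : "cc");
 */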
7243/* Opcode 0x66 0x0f 0x79 - invalid */
7244/* Opcode 0xf3 0x0f 0x79 - invalid */
7245/* Opcode 0xf2 0x0f 0x79 - invalid */
7246
7247/* Opcode 0x0f 0x7a - invalid */
7248/* Opcode 0x66 0x0f 0x7a - invalid */
7249/* Opcode 0xf3 0x0f 0x7a - invalid */
7250/* Opcode 0xf2 0x0f 0x7a - invalid */
7251
7252/* Opcode 0x0f 0x7b - invalid */
7253/* Opcode 0x66 0x0f 0x7b - invalid */
7254/* Opcode 0xf3 0x0f 0x7b - invalid */
7255/* Opcode 0xf2 0x0f 0x7b - invalid */
7256
7257/* Opcode 0x0f 0x7c - invalid */
7258
7259
7260/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7261FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7262{
7263 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7264 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7265}
7266
7267
7268/* Opcode 0xf3 0x0f 0x7c - invalid */
7269
7270
7271/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7272FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7273{
7274 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7275 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7276}
7277
7278
7279/* Opcode 0x0f 0x7d - invalid */
7280
7281
7282/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7283FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7284{
7285 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7286 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7287}
7288
7289
7290/* Opcode 0xf3 0x0f 0x7d - invalid */
7291
7292
7293/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7294FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7295{
7296 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7297 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7298}
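
/*
 * Lane semantics of the four horizontal ops above, per the Intel SDM
 * (element indices, dst = Vps/Vpd, src = Wps/Wpd; the iemAImpl_* workers
 * are authoritative):
 *
 *      haddpd: dst[0] = dst[1] + dst[0];  dst[1] = src[1] + src[0]
 *      hsubpd: dst[0] = dst[0] - dst[1];  dst[1] = src[0] - src[1]
 *      haddps: dst[0] = dst[1] + dst[0];  dst[1] = dst[3] + dst[2];
 *              dst[2] = src[1] + src[0];  dst[3] = src[3] + src[2]
 *      hsubps: dst[0] = dst[0] - dst[1];  dst[1] = dst[2] - dst[3];
 *              dst[2] = src[0] - src[1];  dst[3] = src[2] - src[3]
 */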
7299
7300
7301/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7302FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7303{
7304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7305 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7306 {
7307 /**
7308 * @opcode 0x7e
7309 * @opcodesub rex.w=1
7310 * @oppfx none
7311 * @opcpuid mmx
7312 * @opgroup og_mmx_datamove
7313 * @opxcpttype 5
7314 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7315 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7316 */
7317 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7318 if (IEM_IS_MODRM_REG_MODE(bRm))
7319 {
7320 /* greg64, MMX */
7321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7322 IEM_MC_BEGIN(0, 1);
7323 IEM_MC_LOCAL(uint64_t, u64Tmp);
7324
7325 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7326 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7327 IEM_MC_FPU_TO_MMX_MODE();
7328
7329 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7330 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7331
7332 IEM_MC_ADVANCE_RIP();
7333 IEM_MC_END();
7334 }
7335 else
7336 {
7337 /* [mem64], MMX */
7338 IEM_MC_BEGIN(0, 2);
7339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7340 IEM_MC_LOCAL(uint64_t, u64Tmp);
7341
7342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7344 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7345 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7346 IEM_MC_FPU_TO_MMX_MODE();
7347
7348 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7349 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7350
7351 IEM_MC_ADVANCE_RIP();
7352 IEM_MC_END();
7353 }
7354 }
7355 else
7356 {
7357 /**
7358 * @opdone
7359 * @opcode 0x7e
7360 * @opcodesub rex.w=0
7361 * @oppfx none
7362 * @opcpuid mmx
7363 * @opgroup og_mmx_datamove
7364 * @opxcpttype 5
 7365 * @opfunction iemOp_movd_q_Ey_Pd
7366 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7367 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7368 */
7369 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7370 if (IEM_IS_MODRM_REG_MODE(bRm))
7371 {
7372 /* greg32, MMX */
7373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7374 IEM_MC_BEGIN(0, 1);
7375 IEM_MC_LOCAL(uint32_t, u32Tmp);
7376
7377 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7378 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7379 IEM_MC_FPU_TO_MMX_MODE();
7380
7381 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7382 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7383
7384 IEM_MC_ADVANCE_RIP();
7385 IEM_MC_END();
7386 }
7387 else
7388 {
7389 /* [mem32], MMX */
7390 IEM_MC_BEGIN(0, 2);
7391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7392 IEM_MC_LOCAL(uint32_t, u32Tmp);
7393
7394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7396 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7397 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7398 IEM_MC_FPU_TO_MMX_MODE();
7399
7400 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7401 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7402
7403 IEM_MC_ADVANCE_RIP();
7404 IEM_MC_END();
7405 }
7406 }
7407 return VINF_SUCCESS;
7409}
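
/*
 * Decoding examples for the two forms above (illustrative encodings, per
 * the Intel SDM):
 *
 *      0f 7e c8            movd eax, mm1       ; rex.w=0, 32-bit store
 *      48 0f 7e c8         movq rax, mm1       ; rex.w=1, 64-bit store
 *
 * Both forms switch the FPU to MMX mode, which is why the @optest lines
 * above expect ftw=0xff afterwards.
 */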
7410
7411
7412FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7413{
7414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7415 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7416 {
7417 /**
7418 * @opcode 0x7e
7419 * @opcodesub rex.w=1
7420 * @oppfx 0x66
7421 * @opcpuid sse2
7422 * @opgroup og_sse2_simdint_datamove
7423 * @opxcpttype 5
7424 * @optest 64-bit / op1=1 op2=2 -> op1=2
7425 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7426 */
7427 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7428 if (IEM_IS_MODRM_REG_MODE(bRm))
7429 {
7430 /* greg64, XMM */
7431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7432 IEM_MC_BEGIN(0, 1);
7433 IEM_MC_LOCAL(uint64_t, u64Tmp);
7434
7435 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7437
7438 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7439 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7440
7441 IEM_MC_ADVANCE_RIP();
7442 IEM_MC_END();
7443 }
7444 else
7445 {
7446 /* [mem64], XMM */
7447 IEM_MC_BEGIN(0, 2);
7448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7449 IEM_MC_LOCAL(uint64_t, u64Tmp);
7450
7451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7453 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7455
7456 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7457 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7458
7459 IEM_MC_ADVANCE_RIP();
7460 IEM_MC_END();
7461 }
7462 }
7463 else
7464 {
7465 /**
7466 * @opdone
7467 * @opcode 0x7e
7468 * @opcodesub rex.w=0
7469 * @oppfx 0x66
7470 * @opcpuid sse2
7471 * @opgroup og_sse2_simdint_datamove
7472 * @opxcpttype 5
 7473 * @opfunction iemOp_movd_q_Ey_Vy
7474 * @optest op1=1 op2=2 -> op1=2
7475 * @optest op1=0 op2=-42 -> op1=-42
7476 */
7477 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7478 if (IEM_IS_MODRM_REG_MODE(bRm))
7479 {
7480 /* greg32, XMM */
7481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7482 IEM_MC_BEGIN(0, 1);
7483 IEM_MC_LOCAL(uint32_t, u32Tmp);
7484
7485 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7487
7488 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7489 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7490
7491 IEM_MC_ADVANCE_RIP();
7492 IEM_MC_END();
7493 }
7494 else
7495 {
7496 /* [mem32], XMM */
7497 IEM_MC_BEGIN(0, 2);
7498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7499 IEM_MC_LOCAL(uint32_t, u32Tmp);
7500
7501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7503 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7504 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7505
7506 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7507 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7508
7509 IEM_MC_ADVANCE_RIP();
7510 IEM_MC_END();
7511 }
7512 }
7513 return VINF_SUCCESS;
7515}
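
/*
 * The same rex.w split applies to the 66-prefixed SSE2 form above
 * (illustrative encodings; note the REX byte must immediately precede 0f):
 *
 *      66 0f 7e c8         movd eax, xmm1      ; rex.w=0
 *      66 48 0f 7e c8      movq rax, xmm1      ; rex.w=1
 */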
7516
7517/**
7518 * @opcode 0x7e
7519 * @oppfx 0xf3
7520 * @opcpuid sse2
7521 * @opgroup og_sse2_pcksclr_datamove
7522 * @opxcpttype none
7523 * @optest op1=1 op2=2 -> op1=2
7524 * @optest op1=0 op2=-42 -> op1=-42
7525 */
7526FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7527{
7528 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7530 if (IEM_IS_MODRM_REG_MODE(bRm))
7531 {
7532 /*
7533 * Register, register.
7534 */
7535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7536 IEM_MC_BEGIN(0, 2);
7537 IEM_MC_LOCAL(uint64_t, uSrc);
7538
7539 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7540 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7541
7542 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
7543 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7544
7545 IEM_MC_ADVANCE_RIP();
7546 IEM_MC_END();
7547 }
7548 else
7549 {
7550 /*
 7551 * Register, memory.
7552 */
7553 IEM_MC_BEGIN(0, 2);
7554 IEM_MC_LOCAL(uint64_t, uSrc);
7555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7556
7557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7559 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7560 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7561
7562 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7563 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7564
7565 IEM_MC_ADVANCE_RIP();
7566 IEM_MC_END();
7567 }
7568 return VINF_SUCCESS;
7569}
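
/*
 * The _ZX_U128 stores above are what distinguish this from a plain 64-bit
 * move: bits 127:64 of the destination are zeroed.  Illustrative values:
 *
 *      xmm2 = 0xaaaaaaaa'bbbbbbbb'cccccccc'dddddddd
 *      f3 0f 7e ca         movq xmm1, xmm2
 *      xmm1 = 0x00000000'00000000'cccccccc'dddddddd
 */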
7570
7571/* Opcode 0xf2 0x0f 0x7e - invalid */
7572
7573
7574/** Opcode 0x0f 0x7f - movq Qq, Pq */
7575FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7576{
7577 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7579 if (IEM_IS_MODRM_REG_MODE(bRm))
7580 {
7581 /*
7582 * Register, register.
7583 */
7584 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7585 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7587 IEM_MC_BEGIN(0, 1);
7588 IEM_MC_LOCAL(uint64_t, u64Tmp);
7589 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7590 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7591 IEM_MC_FPU_TO_MMX_MODE();
7592
7593 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7594 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7595
7596 IEM_MC_ADVANCE_RIP();
7597 IEM_MC_END();
7598 }
7599 else
7600 {
7601 /*
 7602 * Memory, register.
7603 */
7604 IEM_MC_BEGIN(0, 2);
7605 IEM_MC_LOCAL(uint64_t, u64Tmp);
7606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7607
7608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7610 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7611 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7612 IEM_MC_FPU_TO_MMX_MODE();
7613
7614 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7615 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7616
7617 IEM_MC_ADVANCE_RIP();
7618 IEM_MC_END();
7619 }
7620 return VINF_SUCCESS;
7621}
7622
7623/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7624FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7625{
7626 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7628 if (IEM_IS_MODRM_REG_MODE(bRm))
7629 {
7630 /*
7631 * Register, register.
7632 */
7633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7634 IEM_MC_BEGIN(0, 0);
7635 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7636 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7637 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7638 IEM_GET_MODRM_REG(pVCpu, bRm));
7639 IEM_MC_ADVANCE_RIP();
7640 IEM_MC_END();
7641 }
7642 else
7643 {
7644 /*
 7645 * Memory, register.
7646 */
7647 IEM_MC_BEGIN(0, 2);
7648 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7650
7651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7653 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7654 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7655
7656 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7657 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7658
7659 IEM_MC_ADVANCE_RIP();
7660 IEM_MC_END();
7661 }
7662 return VINF_SUCCESS;
7663}
7664
7665/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7666FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7667{
7668 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7670 if (IEM_IS_MODRM_REG_MODE(bRm))
7671 {
7672 /*
7673 * Register, register.
7674 */
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7676 IEM_MC_BEGIN(0, 0);
7677 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7678 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7679 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7680 IEM_GET_MODRM_REG(pVCpu, bRm));
7681 IEM_MC_ADVANCE_RIP();
7682 IEM_MC_END();
7683 }
7684 else
7685 {
7686 /*
 7687 * Memory, register.
7688 */
7689 IEM_MC_BEGIN(0, 2);
7690 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7692
7693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7696 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7697
7698 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7699 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7700
7701 IEM_MC_ADVANCE_RIP();
7702 IEM_MC_END();
7703 }
7704 return VINF_SUCCESS;
7705}
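
/*
 * Note the only difference between the movdqa and movdqu memory stores
 * above: movdqa uses IEM_MC_STORE_MEM_U128_ALIGN_SSE, which raises #GP(0)
 * for misaligned operands, while movdqu uses the unaligned
 * IEM_MC_STORE_MEM_U128.  The register-to-register forms are identical.
 */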
7706
7707/* Opcode 0xf2 0x0f 0x7f - invalid */
7708
7709
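/*
 * Reference summary for the Jcc Jv handlers below (conditions per the
 * Intel SDM; the handlers themselves are authoritative):
 *
 *      0f 80  jo       OF=1                0f 81  jno      OF=0
 *      0f 82  jb/jc    CF=1                0f 83  jae/jnc  CF=0
 *      0f 84  je/jz    ZF=1                0f 85  jne/jnz  ZF=0
 *      0f 86  jbe/jna  CF=1 or ZF=1        0f 87  ja/jnbe  CF=0 and ZF=0
 *      0f 88  js       SF=1                0f 89  jns      SF=0
 *      0f 8a  jp/jpe   PF=1                0f 8b  jnp/jpo  PF=0
 *      0f 8c  jl/jnge  SF!=OF              0f 8d  jge/jnl  SF=OF
 *      0f 8e  jle/jng  ZF=1 or SF!=OF      0f 8f  jg/jnle  ZF=0 and SF=OF
 *
 * All take a 16-bit or 32-bit relative displacement (Jv); in 64-bit mode
 * the 32-bit form is used and sign-extended to 64 bits.
 */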
7710
7711/** Opcode 0x0f 0x80. */
7712FNIEMOP_DEF(iemOp_jo_Jv)
7713{
7714 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7715 IEMOP_HLP_MIN_386();
7716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7717 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7718 {
7719 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7721
7722 IEM_MC_BEGIN(0, 0);
7723 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7724 IEM_MC_REL_JMP_S16(i16Imm);
7725 } IEM_MC_ELSE() {
7726 IEM_MC_ADVANCE_RIP();
7727 } IEM_MC_ENDIF();
7728 IEM_MC_END();
7729 }
7730 else
7731 {
7732 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7734
7735 IEM_MC_BEGIN(0, 0);
7736 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7737 IEM_MC_REL_JMP_S32(i32Imm);
7738 } IEM_MC_ELSE() {
7739 IEM_MC_ADVANCE_RIP();
7740 } IEM_MC_ENDIF();
7741 IEM_MC_END();
7742 }
7743 return VINF_SUCCESS;
7744}
7745
7746
7747/** Opcode 0x0f 0x81. */
7748FNIEMOP_DEF(iemOp_jno_Jv)
7749{
7750 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7751 IEMOP_HLP_MIN_386();
7752 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7753 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7754 {
7755 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7757
7758 IEM_MC_BEGIN(0, 0);
7759 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7760 IEM_MC_ADVANCE_RIP();
7761 } IEM_MC_ELSE() {
7762 IEM_MC_REL_JMP_S16(i16Imm);
7763 } IEM_MC_ENDIF();
7764 IEM_MC_END();
7765 }
7766 else
7767 {
7768 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7770
7771 IEM_MC_BEGIN(0, 0);
7772 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7773 IEM_MC_ADVANCE_RIP();
7774 } IEM_MC_ELSE() {
7775 IEM_MC_REL_JMP_S32(i32Imm);
7776 } IEM_MC_ENDIF();
7777 IEM_MC_END();
7778 }
7779 return VINF_SUCCESS;
7780}
7781
7782
7783/** Opcode 0x0f 0x82. */
7784FNIEMOP_DEF(iemOp_jc_Jv)
7785{
7786 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7787 IEMOP_HLP_MIN_386();
7788 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7789 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7790 {
7791 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7793
7794 IEM_MC_BEGIN(0, 0);
7795 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7796 IEM_MC_REL_JMP_S16(i16Imm);
7797 } IEM_MC_ELSE() {
7798 IEM_MC_ADVANCE_RIP();
7799 } IEM_MC_ENDIF();
7800 IEM_MC_END();
7801 }
7802 else
7803 {
7804 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7806
7807 IEM_MC_BEGIN(0, 0);
7808 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7809 IEM_MC_REL_JMP_S32(i32Imm);
7810 } IEM_MC_ELSE() {
7811 IEM_MC_ADVANCE_RIP();
7812 } IEM_MC_ENDIF();
7813 IEM_MC_END();
7814 }
7815 return VINF_SUCCESS;
7816}
7817
7818
7819/** Opcode 0x0f 0x83. */
7820FNIEMOP_DEF(iemOp_jnc_Jv)
7821{
7822 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7823 IEMOP_HLP_MIN_386();
7824 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7825 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7826 {
7827 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7829
7830 IEM_MC_BEGIN(0, 0);
7831 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7832 IEM_MC_ADVANCE_RIP();
7833 } IEM_MC_ELSE() {
7834 IEM_MC_REL_JMP_S16(i16Imm);
7835 } IEM_MC_ENDIF();
7836 IEM_MC_END();
7837 }
7838 else
7839 {
7840 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7842
7843 IEM_MC_BEGIN(0, 0);
7844 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7845 IEM_MC_ADVANCE_RIP();
7846 } IEM_MC_ELSE() {
7847 IEM_MC_REL_JMP_S32(i32Imm);
7848 } IEM_MC_ENDIF();
7849 IEM_MC_END();
7850 }
7851 return VINF_SUCCESS;
7852}
7853
7854
7855/** Opcode 0x0f 0x84. */
7856FNIEMOP_DEF(iemOp_je_Jv)
7857{
7858 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7859 IEMOP_HLP_MIN_386();
7860 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7861 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7862 {
7863 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7865
7866 IEM_MC_BEGIN(0, 0);
7867 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7868 IEM_MC_REL_JMP_S16(i16Imm);
7869 } IEM_MC_ELSE() {
7870 IEM_MC_ADVANCE_RIP();
7871 } IEM_MC_ENDIF();
7872 IEM_MC_END();
7873 }
7874 else
7875 {
7876 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7878
7879 IEM_MC_BEGIN(0, 0);
7880 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7881 IEM_MC_REL_JMP_S32(i32Imm);
7882 } IEM_MC_ELSE() {
7883 IEM_MC_ADVANCE_RIP();
7884 } IEM_MC_ENDIF();
7885 IEM_MC_END();
7886 }
7887 return VINF_SUCCESS;
7888}
7889
7890
7891/** Opcode 0x0f 0x85. */
7892FNIEMOP_DEF(iemOp_jne_Jv)
7893{
7894 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7895 IEMOP_HLP_MIN_386();
7896 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7897 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7898 {
7899 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7901
7902 IEM_MC_BEGIN(0, 0);
7903 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7904 IEM_MC_ADVANCE_RIP();
7905 } IEM_MC_ELSE() {
7906 IEM_MC_REL_JMP_S16(i16Imm);
7907 } IEM_MC_ENDIF();
7908 IEM_MC_END();
7909 }
7910 else
7911 {
7912 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7914
7915 IEM_MC_BEGIN(0, 0);
7916 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7917 IEM_MC_ADVANCE_RIP();
7918 } IEM_MC_ELSE() {
7919 IEM_MC_REL_JMP_S32(i32Imm);
7920 } IEM_MC_ENDIF();
7921 IEM_MC_END();
7922 }
7923 return VINF_SUCCESS;
7924}
7925
7926
7927/** Opcode 0x0f 0x86. */
7928FNIEMOP_DEF(iemOp_jbe_Jv)
7929{
7930 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7931 IEMOP_HLP_MIN_386();
7932 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7933 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7934 {
7935 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7937
7938 IEM_MC_BEGIN(0, 0);
7939 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7940 IEM_MC_REL_JMP_S16(i16Imm);
7941 } IEM_MC_ELSE() {
7942 IEM_MC_ADVANCE_RIP();
7943 } IEM_MC_ENDIF();
7944 IEM_MC_END();
7945 }
7946 else
7947 {
7948 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7950
7951 IEM_MC_BEGIN(0, 0);
7952 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7953 IEM_MC_REL_JMP_S32(i32Imm);
7954 } IEM_MC_ELSE() {
7955 IEM_MC_ADVANCE_RIP();
7956 } IEM_MC_ENDIF();
7957 IEM_MC_END();
7958 }
7959 return VINF_SUCCESS;
7960}
7961
7962
7963/** Opcode 0x0f 0x87. */
7964FNIEMOP_DEF(iemOp_jnbe_Jv)
7965{
7966 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7967 IEMOP_HLP_MIN_386();
7968 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7969 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7970 {
7971 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7973
7974 IEM_MC_BEGIN(0, 0);
7975 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7976 IEM_MC_ADVANCE_RIP();
7977 } IEM_MC_ELSE() {
7978 IEM_MC_REL_JMP_S16(i16Imm);
7979 } IEM_MC_ENDIF();
7980 IEM_MC_END();
7981 }
7982 else
7983 {
7984 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7986
7987 IEM_MC_BEGIN(0, 0);
7988 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7989 IEM_MC_ADVANCE_RIP();
7990 } IEM_MC_ELSE() {
7991 IEM_MC_REL_JMP_S32(i32Imm);
7992 } IEM_MC_ENDIF();
7993 IEM_MC_END();
7994 }
7995 return VINF_SUCCESS;
7996}
7997
7998
7999/** Opcode 0x0f 0x88. */
8000FNIEMOP_DEF(iemOp_js_Jv)
8001{
8002 IEMOP_MNEMONIC(js_Jv, "js Jv");
8003 IEMOP_HLP_MIN_386();
8004 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8005 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8006 {
8007 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8009
8010 IEM_MC_BEGIN(0, 0);
8011 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8012 IEM_MC_REL_JMP_S16(i16Imm);
8013 } IEM_MC_ELSE() {
8014 IEM_MC_ADVANCE_RIP();
8015 } IEM_MC_ENDIF();
8016 IEM_MC_END();
8017 }
8018 else
8019 {
8020 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8022
8023 IEM_MC_BEGIN(0, 0);
8024 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8025 IEM_MC_REL_JMP_S32(i32Imm);
8026 } IEM_MC_ELSE() {
8027 IEM_MC_ADVANCE_RIP();
8028 } IEM_MC_ENDIF();
8029 IEM_MC_END();
8030 }
8031 return VINF_SUCCESS;
8032}
8033
8034
8035/** Opcode 0x0f 0x89. */
8036FNIEMOP_DEF(iemOp_jns_Jv)
8037{
8038 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8039 IEMOP_HLP_MIN_386();
8040 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8041 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8042 {
8043 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8045
8046 IEM_MC_BEGIN(0, 0);
8047 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8048 IEM_MC_ADVANCE_RIP();
8049 } IEM_MC_ELSE() {
8050 IEM_MC_REL_JMP_S16(i16Imm);
8051 } IEM_MC_ENDIF();
8052 IEM_MC_END();
8053 }
8054 else
8055 {
8056 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8058
8059 IEM_MC_BEGIN(0, 0);
8060 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8061 IEM_MC_ADVANCE_RIP();
8062 } IEM_MC_ELSE() {
8063 IEM_MC_REL_JMP_S32(i32Imm);
8064 } IEM_MC_ENDIF();
8065 IEM_MC_END();
8066 }
8067 return VINF_SUCCESS;
8068}
8069
8070
8071/** Opcode 0x0f 0x8a. */
8072FNIEMOP_DEF(iemOp_jp_Jv)
8073{
8074 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8075 IEMOP_HLP_MIN_386();
8076 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8077 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8078 {
8079 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8081
8082 IEM_MC_BEGIN(0, 0);
8083 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8084 IEM_MC_REL_JMP_S16(i16Imm);
8085 } IEM_MC_ELSE() {
8086 IEM_MC_ADVANCE_RIP();
8087 } IEM_MC_ENDIF();
8088 IEM_MC_END();
8089 }
8090 else
8091 {
8092 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8094
8095 IEM_MC_BEGIN(0, 0);
8096 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8097 IEM_MC_REL_JMP_S32(i32Imm);
8098 } IEM_MC_ELSE() {
8099 IEM_MC_ADVANCE_RIP();
8100 } IEM_MC_ENDIF();
8101 IEM_MC_END();
8102 }
8103 return VINF_SUCCESS;
8104}
8105
8106
8107/** Opcode 0x0f 0x8b. */
8108FNIEMOP_DEF(iemOp_jnp_Jv)
8109{
8110 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8111 IEMOP_HLP_MIN_386();
8112 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8113 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8114 {
8115 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8117
8118 IEM_MC_BEGIN(0, 0);
8119 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8120 IEM_MC_ADVANCE_RIP();
8121 } IEM_MC_ELSE() {
8122 IEM_MC_REL_JMP_S16(i16Imm);
8123 } IEM_MC_ENDIF();
8124 IEM_MC_END();
8125 }
8126 else
8127 {
8128 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8130
8131 IEM_MC_BEGIN(0, 0);
8132 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8133 IEM_MC_ADVANCE_RIP();
8134 } IEM_MC_ELSE() {
8135 IEM_MC_REL_JMP_S32(i32Imm);
8136 } IEM_MC_ENDIF();
8137 IEM_MC_END();
8138 }
8139 return VINF_SUCCESS;
8140}
8141
8142
8143/** Opcode 0x0f 0x8c. */
8144FNIEMOP_DEF(iemOp_jl_Jv)
8145{
8146 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8147 IEMOP_HLP_MIN_386();
8148 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8149 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8150 {
8151 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8153
8154 IEM_MC_BEGIN(0, 0);
8155 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8156 IEM_MC_REL_JMP_S16(i16Imm);
8157 } IEM_MC_ELSE() {
8158 IEM_MC_ADVANCE_RIP();
8159 } IEM_MC_ENDIF();
8160 IEM_MC_END();
8161 }
8162 else
8163 {
8164 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8166
8167 IEM_MC_BEGIN(0, 0);
8168 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8169 IEM_MC_REL_JMP_S32(i32Imm);
8170 } IEM_MC_ELSE() {
8171 IEM_MC_ADVANCE_RIP();
8172 } IEM_MC_ENDIF();
8173 IEM_MC_END();
8174 }
8175 return VINF_SUCCESS;
8176}
8177
8178
8179/** Opcode 0x0f 0x8d. */
8180FNIEMOP_DEF(iemOp_jnl_Jv)
8181{
8182 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8183 IEMOP_HLP_MIN_386();
8184 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8185 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8186 {
8187 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8189
8190 IEM_MC_BEGIN(0, 0);
8191 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8192 IEM_MC_ADVANCE_RIP();
8193 } IEM_MC_ELSE() {
8194 IEM_MC_REL_JMP_S16(i16Imm);
8195 } IEM_MC_ENDIF();
8196 IEM_MC_END();
8197 }
8198 else
8199 {
8200 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8202
8203 IEM_MC_BEGIN(0, 0);
8204 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8205 IEM_MC_ADVANCE_RIP();
8206 } IEM_MC_ELSE() {
8207 IEM_MC_REL_JMP_S32(i32Imm);
8208 } IEM_MC_ENDIF();
8209 IEM_MC_END();
8210 }
8211 return VINF_SUCCESS;
8212}
8213
8214
8215/** Opcode 0x0f 0x8e. */
8216FNIEMOP_DEF(iemOp_jle_Jv)
8217{
8218 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8219 IEMOP_HLP_MIN_386();
8220 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8221 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8222 {
8223 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8225
8226 IEM_MC_BEGIN(0, 0);
8227 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8228 IEM_MC_REL_JMP_S16(i16Imm);
8229 } IEM_MC_ELSE() {
8230 IEM_MC_ADVANCE_RIP();
8231 } IEM_MC_ENDIF();
8232 IEM_MC_END();
8233 }
8234 else
8235 {
8236 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8238
8239 IEM_MC_BEGIN(0, 0);
8240 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8241 IEM_MC_REL_JMP_S32(i32Imm);
8242 } IEM_MC_ELSE() {
8243 IEM_MC_ADVANCE_RIP();
8244 } IEM_MC_ENDIF();
8245 IEM_MC_END();
8246 }
8247 return VINF_SUCCESS;
8248}
8249
8250
8251/** Opcode 0x0f 0x8f. */
8252FNIEMOP_DEF(iemOp_jnle_Jv)
8253{
8254 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8255 IEMOP_HLP_MIN_386();
8256 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8257 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8258 {
8259 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8261
8262 IEM_MC_BEGIN(0, 0);
8263 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8264 IEM_MC_ADVANCE_RIP();
8265 } IEM_MC_ELSE() {
8266 IEM_MC_REL_JMP_S16(i16Imm);
8267 } IEM_MC_ENDIF();
8268 IEM_MC_END();
8269 }
8270 else
8271 {
8272 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8274
8275 IEM_MC_BEGIN(0, 0);
8276 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8277 IEM_MC_ADVANCE_RIP();
8278 } IEM_MC_ELSE() {
8279 IEM_MC_REL_JMP_S32(i32Imm);
8280 } IEM_MC_ENDIF();
8281 IEM_MC_END();
8282 }
8283 return VINF_SUCCESS;
8284}
8285
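/*
 * The 0f 90..9f setcc handlers below mirror the Jcc conditions above, but
 * store 1 or 0 in a byte-sized destination instead of branching.  The
 * destination is always 8 bits, regardless of operand-size prefixes.
 * Illustrative encodings (per the Intel SDM):
 *
 *      0f 94 c0            sete al
 *      41 0f 94 c0         sete r8b            ; REX.B selects r8b
 *      0f 94 00            sete byte [rax]     ; memory form, 64-bit mode
 */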
8286
8287/** Opcode 0x0f 0x90. */
8288FNIEMOP_DEF(iemOp_seto_Eb)
8289{
8290 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8291 IEMOP_HLP_MIN_386();
8292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8293
8294 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8295 * any way. AMD says it's "unused", whatever that means. We're
8296 * ignoring for now. */
8297 if (IEM_IS_MODRM_REG_MODE(bRm))
8298 {
8299 /* register target */
8300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8301 IEM_MC_BEGIN(0, 0);
8302 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8303 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8304 } IEM_MC_ELSE() {
8305 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8306 } IEM_MC_ENDIF();
8307 IEM_MC_ADVANCE_RIP();
8308 IEM_MC_END();
8309 }
8310 else
8311 {
8312 /* memory target */
8313 IEM_MC_BEGIN(0, 1);
8314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8317 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8318 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8319 } IEM_MC_ELSE() {
8320 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8321 } IEM_MC_ENDIF();
8322 IEM_MC_ADVANCE_RIP();
8323 IEM_MC_END();
8324 }
8325 return VINF_SUCCESS;
8326}
8327
8328
8329/** Opcode 0x0f 0x91. */
8330FNIEMOP_DEF(iemOp_setno_Eb)
8331{
8332 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8333 IEMOP_HLP_MIN_386();
8334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8335
8336 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8337 * any way. AMD says it's "unused", whatever that means. We're
8338 * ignoring for now. */
8339 if (IEM_IS_MODRM_REG_MODE(bRm))
8340 {
8341 /* register target */
8342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8343 IEM_MC_BEGIN(0, 0);
8344 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8345 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8346 } IEM_MC_ELSE() {
8347 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8348 } IEM_MC_ENDIF();
8349 IEM_MC_ADVANCE_RIP();
8350 IEM_MC_END();
8351 }
8352 else
8353 {
8354 /* memory target */
8355 IEM_MC_BEGIN(0, 1);
8356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8359 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8360 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8361 } IEM_MC_ELSE() {
8362 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8363 } IEM_MC_ENDIF();
8364 IEM_MC_ADVANCE_RIP();
8365 IEM_MC_END();
8366 }
8367 return VINF_SUCCESS;
8368}
8369
8370
8371/** Opcode 0x0f 0x92. */
8372FNIEMOP_DEF(iemOp_setc_Eb)
8373{
8374 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8375 IEMOP_HLP_MIN_386();
8376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8377
8378 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8379 * any way. AMD says it's "unused", whatever that means. We're
8380 * ignoring for now. */
8381 if (IEM_IS_MODRM_REG_MODE(bRm))
8382 {
8383 /* register target */
8384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8385 IEM_MC_BEGIN(0, 0);
8386 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8387 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8388 } IEM_MC_ELSE() {
8389 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8390 } IEM_MC_ENDIF();
8391 IEM_MC_ADVANCE_RIP();
8392 IEM_MC_END();
8393 }
8394 else
8395 {
8396 /* memory target */
8397 IEM_MC_BEGIN(0, 1);
8398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8401 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8402 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8403 } IEM_MC_ELSE() {
8404 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8405 } IEM_MC_ENDIF();
8406 IEM_MC_ADVANCE_RIP();
8407 IEM_MC_END();
8408 }
8409 return VINF_SUCCESS;
8410}
8411
8412
8413/** Opcode 0x0f 0x93. */
8414FNIEMOP_DEF(iemOp_setnc_Eb)
8415{
8416 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8417 IEMOP_HLP_MIN_386();
8418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8419
8420 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8421 * any way. AMD says it's "unused", whatever that means. We're
8422 * ignoring for now. */
8423 if (IEM_IS_MODRM_REG_MODE(bRm))
8424 {
8425 /* register target */
8426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8427 IEM_MC_BEGIN(0, 0);
8428 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8429 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8430 } IEM_MC_ELSE() {
8431 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8432 } IEM_MC_ENDIF();
8433 IEM_MC_ADVANCE_RIP();
8434 IEM_MC_END();
8435 }
8436 else
8437 {
8438 /* memory target */
8439 IEM_MC_BEGIN(0, 1);
8440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8443 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8444 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8445 } IEM_MC_ELSE() {
8446 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8447 } IEM_MC_ENDIF();
8448 IEM_MC_ADVANCE_RIP();
8449 IEM_MC_END();
8450 }
8451 return VINF_SUCCESS;
8452}
8453
8454
8455/** Opcode 0x0f 0x94. */
8456FNIEMOP_DEF(iemOp_sete_Eb)
8457{
8458 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8459 IEMOP_HLP_MIN_386();
8460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8461
8462 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8463 * any way. AMD says it's "unused", whatever that means. We're
8464 * ignoring for now. */
8465 if (IEM_IS_MODRM_REG_MODE(bRm))
8466 {
8467 /* register target */
8468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8469 IEM_MC_BEGIN(0, 0);
8470 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8471 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8472 } IEM_MC_ELSE() {
8473 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8474 } IEM_MC_ENDIF();
8475 IEM_MC_ADVANCE_RIP();
8476 IEM_MC_END();
8477 }
8478 else
8479 {
8480 /* memory target */
8481 IEM_MC_BEGIN(0, 1);
8482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8485 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8486 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8487 } IEM_MC_ELSE() {
8488 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8489 } IEM_MC_ENDIF();
8490 IEM_MC_ADVANCE_RIP();
8491 IEM_MC_END();
8492 }
8493 return VINF_SUCCESS;
8494}
8495
8496
8497/** Opcode 0x0f 0x95. */
8498FNIEMOP_DEF(iemOp_setne_Eb)
8499{
8500 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8501 IEMOP_HLP_MIN_386();
8502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8503
8504 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8505 * any way. AMD says it's "unused", whatever that means. We're
8506 * ignoring for now. */
8507 if (IEM_IS_MODRM_REG_MODE(bRm))
8508 {
8509 /* register target */
8510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8511 IEM_MC_BEGIN(0, 0);
8512 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8513 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8514 } IEM_MC_ELSE() {
8515 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8516 } IEM_MC_ENDIF();
8517 IEM_MC_ADVANCE_RIP();
8518 IEM_MC_END();
8519 }
8520 else
8521 {
8522 /* memory target */
8523 IEM_MC_BEGIN(0, 1);
8524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8527 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8528 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8529 } IEM_MC_ELSE() {
8530 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8531 } IEM_MC_ENDIF();
8532 IEM_MC_ADVANCE_RIP();
8533 IEM_MC_END();
8534 }
8535 return VINF_SUCCESS;
8536}
8537
8538
8539/** Opcode 0x0f 0x96. */
8540FNIEMOP_DEF(iemOp_setbe_Eb)
8541{
8542 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8543 IEMOP_HLP_MIN_386();
8544 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8545
8546 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8547 * any way. AMD says it's "unused", whatever that means. We're
8548 * ignoring for now. */
8549 if (IEM_IS_MODRM_REG_MODE(bRm))
8550 {
8551 /* register target */
8552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8553 IEM_MC_BEGIN(0, 0);
8554 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8555 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8556 } IEM_MC_ELSE() {
8557 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8558 } IEM_MC_ENDIF();
8559 IEM_MC_ADVANCE_RIP();
8560 IEM_MC_END();
8561 }
8562 else
8563 {
8564 /* memory target */
8565 IEM_MC_BEGIN(0, 1);
8566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8569 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8570 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8571 } IEM_MC_ELSE() {
8572 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8573 } IEM_MC_ENDIF();
8574 IEM_MC_ADVANCE_RIP();
8575 IEM_MC_END();
8576 }
8577 return VINF_SUCCESS;
8578}
8579
8580
8581/** Opcode 0x0f 0x97. */
8582FNIEMOP_DEF(iemOp_setnbe_Eb)
8583{
8584 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8585 IEMOP_HLP_MIN_386();
8586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8587
8588 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8589 * any way. AMD says it's "unused", whatever that means. We're
8590 * ignoring for now. */
8591 if (IEM_IS_MODRM_REG_MODE(bRm))
8592 {
8593 /* register target */
8594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8595 IEM_MC_BEGIN(0, 0);
8596 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8597 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8598 } IEM_MC_ELSE() {
8599 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8600 } IEM_MC_ENDIF();
8601 IEM_MC_ADVANCE_RIP();
8602 IEM_MC_END();
8603 }
8604 else
8605 {
8606 /* memory target */
8607 IEM_MC_BEGIN(0, 1);
8608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8611 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8612 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8613 } IEM_MC_ELSE() {
8614 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8615 } IEM_MC_ENDIF();
8616 IEM_MC_ADVANCE_RIP();
8617 IEM_MC_END();
8618 }
8619 return VINF_SUCCESS;
8620}
8621
8622
8623/** Opcode 0x0f 0x98. */
8624FNIEMOP_DEF(iemOp_sets_Eb)
8625{
8626 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8627 IEMOP_HLP_MIN_386();
8628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8629
8630 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8631 * any way. AMD says it's "unused", whatever that means. We're
8632 * ignoring for now. */
8633 if (IEM_IS_MODRM_REG_MODE(bRm))
8634 {
8635 /* register target */
8636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8637 IEM_MC_BEGIN(0, 0);
8638 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8639 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8640 } IEM_MC_ELSE() {
8641 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8642 } IEM_MC_ENDIF();
8643 IEM_MC_ADVANCE_RIP();
8644 IEM_MC_END();
8645 }
8646 else
8647 {
8648 /* memory target */
8649 IEM_MC_BEGIN(0, 1);
8650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8653 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8654 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8655 } IEM_MC_ELSE() {
8656 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8657 } IEM_MC_ENDIF();
8658 IEM_MC_ADVANCE_RIP();
8659 IEM_MC_END();
8660 }
8661 return VINF_SUCCESS;
8662}
8663
8664
8665/** Opcode 0x0f 0x99. */
8666FNIEMOP_DEF(iemOp_setns_Eb)
8667{
8668 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8669 IEMOP_HLP_MIN_386();
8670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8671
8672 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8673 * any way. AMD says it's "unused", whatever that means. We're
8674 * ignoring for now. */
8675 if (IEM_IS_MODRM_REG_MODE(bRm))
8676 {
8677 /* register target */
8678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8679 IEM_MC_BEGIN(0, 0);
8680 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8681 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8682 } IEM_MC_ELSE() {
8683 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8684 } IEM_MC_ENDIF();
8685 IEM_MC_ADVANCE_RIP();
8686 IEM_MC_END();
8687 }
8688 else
8689 {
8690 /* memory target */
8691 IEM_MC_BEGIN(0, 1);
8692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8695 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8696 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8697 } IEM_MC_ELSE() {
8698 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8699 } IEM_MC_ENDIF();
8700 IEM_MC_ADVANCE_RIP();
8701 IEM_MC_END();
8702 }
8703 return VINF_SUCCESS;
8704}
8705
8706
8707/** Opcode 0x0f 0x9a. */
8708FNIEMOP_DEF(iemOp_setp_Eb)
8709{
8710 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8711 IEMOP_HLP_MIN_386();
8712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8713
8714 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8715 * any way. AMD says it's "unused", whatever that means. We're
8716 * ignoring for now. */
8717 if (IEM_IS_MODRM_REG_MODE(bRm))
8718 {
8719 /* register target */
8720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8721 IEM_MC_BEGIN(0, 0);
8722 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8723 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8724 } IEM_MC_ELSE() {
8725 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8726 } IEM_MC_ENDIF();
8727 IEM_MC_ADVANCE_RIP();
8728 IEM_MC_END();
8729 }
8730 else
8731 {
8732 /* memory target */
8733 IEM_MC_BEGIN(0, 1);
8734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8737 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8738 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8739 } IEM_MC_ELSE() {
8740 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8741 } IEM_MC_ENDIF();
8742 IEM_MC_ADVANCE_RIP();
8743 IEM_MC_END();
8744 }
8745 return VINF_SUCCESS;
8746}
8747
8748
8749/** Opcode 0x0f 0x9b. */
8750FNIEMOP_DEF(iemOp_setnp_Eb)
8751{
8752 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8753 IEMOP_HLP_MIN_386();
8754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8755
8756 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8757 * any way. AMD says it's "unused", whatever that means. We're
8758 * ignoring for now. */
8759 if (IEM_IS_MODRM_REG_MODE(bRm))
8760 {
8761 /* register target */
8762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8763 IEM_MC_BEGIN(0, 0);
8764 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8765 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8766 } IEM_MC_ELSE() {
8767 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8768 } IEM_MC_ENDIF();
8769 IEM_MC_ADVANCE_RIP();
8770 IEM_MC_END();
8771 }
8772 else
8773 {
8774 /* memory target */
8775 IEM_MC_BEGIN(0, 1);
8776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8779 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8780 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8781 } IEM_MC_ELSE() {
8782 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8783 } IEM_MC_ENDIF();
8784 IEM_MC_ADVANCE_RIP();
8785 IEM_MC_END();
8786 }
8787 return VINF_SUCCESS;
8788}
8789
8790
8791/** Opcode 0x0f 0x9c. */
8792FNIEMOP_DEF(iemOp_setl_Eb)
8793{
8794 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8795 IEMOP_HLP_MIN_386();
8796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8797
8798 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8799 * any way. AMD says it's "unused", whatever that means. We're
8800 * ignoring for now. */
8801 if (IEM_IS_MODRM_REG_MODE(bRm))
8802 {
8803 /* register target */
8804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8805 IEM_MC_BEGIN(0, 0);
8806 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8807 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8808 } IEM_MC_ELSE() {
8809 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8810 } IEM_MC_ENDIF();
8811 IEM_MC_ADVANCE_RIP();
8812 IEM_MC_END();
8813 }
8814 else
8815 {
8816 /* memory target */
8817 IEM_MC_BEGIN(0, 1);
8818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8821 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8822 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8823 } IEM_MC_ELSE() {
8824 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8825 } IEM_MC_ENDIF();
8826 IEM_MC_ADVANCE_RIP();
8827 IEM_MC_END();
8828 }
8829 return VINF_SUCCESS;
8830}
8831
8832
8833/** Opcode 0x0f 0x9d. */
8834FNIEMOP_DEF(iemOp_setnl_Eb)
8835{
8836 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8837 IEMOP_HLP_MIN_386();
8838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8839
8840 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8841 * any way. AMD says it's "unused", whatever that means. We're
8842 * ignoring for now. */
8843 if (IEM_IS_MODRM_REG_MODE(bRm))
8844 {
8845 /* register target */
8846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8847 IEM_MC_BEGIN(0, 0);
8848 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8849 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8850 } IEM_MC_ELSE() {
8851 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8852 } IEM_MC_ENDIF();
8853 IEM_MC_ADVANCE_RIP();
8854 IEM_MC_END();
8855 }
8856 else
8857 {
8858 /* memory target */
8859 IEM_MC_BEGIN(0, 1);
8860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8863 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8864 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8865 } IEM_MC_ELSE() {
8866 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8867 } IEM_MC_ENDIF();
8868 IEM_MC_ADVANCE_RIP();
8869 IEM_MC_END();
8870 }
8871 return VINF_SUCCESS;
8872}
8873
8874
8875/** Opcode 0x0f 0x9e. */
8876FNIEMOP_DEF(iemOp_setle_Eb)
8877{
8878 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8879 IEMOP_HLP_MIN_386();
8880 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8881
8882 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8883 * any way. AMD says it's "unused", whatever that means. We're
8884 * ignoring for now. */
8885 if (IEM_IS_MODRM_REG_MODE(bRm))
8886 {
8887 /* register target */
8888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8889 IEM_MC_BEGIN(0, 0);
8890 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8891 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8892 } IEM_MC_ELSE() {
8893 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8894 } IEM_MC_ENDIF();
8895 IEM_MC_ADVANCE_RIP();
8896 IEM_MC_END();
8897 }
8898 else
8899 {
8900 /* memory target */
8901 IEM_MC_BEGIN(0, 1);
8902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8905 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8906 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8907 } IEM_MC_ELSE() {
8908 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8909 } IEM_MC_ENDIF();
8910 IEM_MC_ADVANCE_RIP();
8911 IEM_MC_END();
8912 }
8913 return VINF_SUCCESS;
8914}
8915
8916
8917/** Opcode 0x0f 0x9f. */
8918FNIEMOP_DEF(iemOp_setnle_Eb)
8919{
8920 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8921 IEMOP_HLP_MIN_386();
8922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8923
8924 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8925 * any way. AMD says it's "unused", whatever that means. We're
8926 * ignoring for now. */
8927 if (IEM_IS_MODRM_REG_MODE(bRm))
8928 {
8929 /* register target */
8930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8931 IEM_MC_BEGIN(0, 0);
8932 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8933 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8934 } IEM_MC_ELSE() {
8935 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8936 } IEM_MC_ENDIF();
8937 IEM_MC_ADVANCE_RIP();
8938 IEM_MC_END();
8939 }
8940 else
8941 {
8942 /* memory target */
8943 IEM_MC_BEGIN(0, 1);
8944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8947 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8948 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8949 } IEM_MC_ELSE() {
8950 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8951 } IEM_MC_ENDIF();
8952 IEM_MC_ADVANCE_RIP();
8953 IEM_MC_END();
8954 }
8955 return VINF_SUCCESS;
8956}
8957
8958
8959/**
8960 * Common 'push segment-register' helper.
8961 */
8962FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8963{
8964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 8965 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only fs/gs (0f a0/a8) are valid in 64-bit mode. */
8966 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8967
8968 switch (pVCpu->iem.s.enmEffOpSize)
8969 {
8970 case IEMMODE_16BIT:
8971 IEM_MC_BEGIN(0, 1);
8972 IEM_MC_LOCAL(uint16_t, u16Value);
8973 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8974 IEM_MC_PUSH_U16(u16Value);
8975 IEM_MC_ADVANCE_RIP();
8976 IEM_MC_END();
8977 break;
8978
8979 case IEMMODE_32BIT:
8980 IEM_MC_BEGIN(0, 1);
8981 IEM_MC_LOCAL(uint32_t, u32Value);
8982 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
8983 IEM_MC_PUSH_U32_SREG(u32Value);
8984 IEM_MC_ADVANCE_RIP();
8985 IEM_MC_END();
8986 break;
8987
8988 case IEMMODE_64BIT:
8989 IEM_MC_BEGIN(0, 1);
8990 IEM_MC_LOCAL(uint64_t, u64Value);
8991 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8992 IEM_MC_PUSH_U64(u64Value);
8993 IEM_MC_ADVANCE_RIP();
8994 IEM_MC_END();
8995 break;
8996 }
8997
8998 return VINF_SUCCESS;
8999}
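
/*
 * Note: the 32-bit case uses IEM_MC_PUSH_U32_SREG rather than
 * IEM_MC_PUSH_U32.  The behaviour modelled (an assumption here, see the MC
 * implementation for the authoritative details) is that of recent CPUs,
 * which on a 32-bit 'push sreg' only write the low 16 bits of the stack
 * slot and leave the high half of the dword untouched.
 */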
9000
9001
9002/** Opcode 0x0f 0xa0. */
9003FNIEMOP_DEF(iemOp_push_fs)
9004{
9005 IEMOP_MNEMONIC(push_fs, "push fs");
9006 IEMOP_HLP_MIN_386();
9007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9008 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
9009}
9010
9011
9012/** Opcode 0x0f 0xa1. */
9013FNIEMOP_DEF(iemOp_pop_fs)
9014{
9015 IEMOP_MNEMONIC(pop_fs, "pop fs");
9016 IEMOP_HLP_MIN_386();
9017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9018 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
9019}
9020
9021
9022/** Opcode 0x0f 0xa2. */
9023FNIEMOP_DEF(iemOp_cpuid)
9024{
9025 IEMOP_MNEMONIC(cpuid, "cpuid");
9026 IEMOP_HLP_MIN_486(); /* not all 486es. */
9027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9028 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
9029}
9030
9031
9032/**
9033 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
9034 * iemOp_bts_Ev_Gv.
9035 */
9036FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
9037{
9038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9039 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9040
9041 if (IEM_IS_MODRM_REG_MODE(bRm))
9042 {
9043 /* register destination. */
9044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9045 switch (pVCpu->iem.s.enmEffOpSize)
9046 {
9047 case IEMMODE_16BIT:
9048 IEM_MC_BEGIN(3, 0);
9049 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9050 IEM_MC_ARG(uint16_t, u16Src, 1);
9051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9052
9053 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9054 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
9055 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9056 IEM_MC_REF_EFLAGS(pEFlags);
9057 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9058
9059 IEM_MC_ADVANCE_RIP();
9060 IEM_MC_END();
9061 return VINF_SUCCESS;
9062
9063 case IEMMODE_32BIT:
9064 IEM_MC_BEGIN(3, 0);
9065 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9066 IEM_MC_ARG(uint32_t, u32Src, 1);
9067 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9068
9069 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9070 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
9071 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9072 IEM_MC_REF_EFLAGS(pEFlags);
9073 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9074
9075 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9076 IEM_MC_ADVANCE_RIP();
9077 IEM_MC_END();
9078 return VINF_SUCCESS;
9079
9080 case IEMMODE_64BIT:
9081 IEM_MC_BEGIN(3, 0);
9082 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9083 IEM_MC_ARG(uint64_t, u64Src, 1);
9084 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9085
9086 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9087 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
9088 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9089 IEM_MC_REF_EFLAGS(pEFlags);
9090 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9091
9092 IEM_MC_ADVANCE_RIP();
9093 IEM_MC_END();
9094 return VINF_SUCCESS;
9095
9096 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9097 }
9098 }
9099 else
9100 {
9101 /* memory destination. */
9102
9103 uint32_t fAccess;
9104 if (pImpl->pfnLockedU16)
9105 fAccess = IEM_ACCESS_DATA_RW;
9106 else /* BT */
9107 fAccess = IEM_ACCESS_DATA_R;
9108
9109 /** @todo test negative bit offsets! */
9110 switch (pVCpu->iem.s.enmEffOpSize)
9111 {
9112 case IEMMODE_16BIT:
9113 IEM_MC_BEGIN(3, 2);
9114 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9115 IEM_MC_ARG(uint16_t, u16Src, 1);
9116 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9118 IEM_MC_LOCAL(int16_t, i16AddrAdj);
9119
9120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9121 if (pImpl->pfnLockedU16)
9122 IEMOP_HLP_DONE_DECODING();
9123 else
9124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9125 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9126 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
9127 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
9128 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
9129 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
9130 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
9131 IEM_MC_FETCH_EFLAGS(EFlags);
9132
9133 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9134 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9135 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9136 else
9137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9138 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9139
9140 IEM_MC_COMMIT_EFLAGS(EFlags);
9141 IEM_MC_ADVANCE_RIP();
9142 IEM_MC_END();
9143 return VINF_SUCCESS;
9144
9145 case IEMMODE_32BIT:
9146 IEM_MC_BEGIN(3, 2);
9147 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9148 IEM_MC_ARG(uint32_t, u32Src, 1);
9149 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9151 IEM_MC_LOCAL(int32_t, i32AddrAdj);
9152
9153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9154 if (pImpl->pfnLockedU16)
9155 IEMOP_HLP_DONE_DECODING();
9156 else
9157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9158 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9159 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
9160 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
9161 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
9162 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
9163 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
9164 IEM_MC_FETCH_EFLAGS(EFlags);
9165
9166 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9169 else
9170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9171 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9172
9173 IEM_MC_COMMIT_EFLAGS(EFlags);
9174 IEM_MC_ADVANCE_RIP();
9175 IEM_MC_END();
9176 return VINF_SUCCESS;
9177
9178 case IEMMODE_64BIT:
9179 IEM_MC_BEGIN(3, 2);
9180 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9181 IEM_MC_ARG(uint64_t, u64Src, 1);
9182 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9184 IEM_MC_LOCAL(int64_t, i64AddrAdj);
9185
9186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9187 if (pImpl->pfnLockedU16)
9188 IEMOP_HLP_DONE_DECODING();
9189 else
9190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9191 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9192 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
9193 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
9194 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
9195 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
9196 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
9197 IEM_MC_FETCH_EFLAGS(EFlags);
9198
9199 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9200 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9202 else
9203 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9204 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9205
9206 IEM_MC_COMMIT_EFLAGS(EFlags);
9207 IEM_MC_ADVANCE_RIP();
9208 IEM_MC_END();
9209 return VINF_SUCCESS;
9210
9211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9212 }
9213 }
9214}
9215
9216
9217/** Opcode 0x0f 0xa3. */
9218FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9219{
9220 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9221 IEMOP_HLP_MIN_386();
9222 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
9223}
9224
9225
9226/**
9227 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9228 */
9229FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9230{
9231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9232 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9233
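/*
 * SHLD shifts the destination left by the (masked) count, filling the
 * vacated low bits from the most significant bits of the source; SHRD is
 * the mirror image.  E.g. with ax=0x1234, bx=0xabcd:
 *      shld ax, bx, 4  ->  ax = 0x234a  (low nibble taken from bx's top).
 * The source register itself is never written by these instructions.
 */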
9234 if (IEM_IS_MODRM_REG_MODE(bRm))
9235 {
9236 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9238
9239 switch (pVCpu->iem.s.enmEffOpSize)
9240 {
9241 case IEMMODE_16BIT:
9242 IEM_MC_BEGIN(4, 0);
9243 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9244 IEM_MC_ARG(uint16_t, u16Src, 1);
9245 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9246 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9247
9248 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9249 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9250 IEM_MC_REF_EFLAGS(pEFlags);
9251 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9252
9253 IEM_MC_ADVANCE_RIP();
9254 IEM_MC_END();
9255 return VINF_SUCCESS;
9256
9257 case IEMMODE_32BIT:
9258 IEM_MC_BEGIN(4, 0);
9259 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9260 IEM_MC_ARG(uint32_t, u32Src, 1);
9261 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9262 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9263
9264 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9265 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9266 IEM_MC_REF_EFLAGS(pEFlags);
9267 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9268
9269 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9270 IEM_MC_ADVANCE_RIP();
9271 IEM_MC_END();
9272 return VINF_SUCCESS;
9273
9274 case IEMMODE_64BIT:
9275 IEM_MC_BEGIN(4, 0);
9276 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9277 IEM_MC_ARG(uint64_t, u64Src, 1);
9278 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9279 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9280
9281 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9282 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9283 IEM_MC_REF_EFLAGS(pEFlags);
9284 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9285
9286 IEM_MC_ADVANCE_RIP();
9287 IEM_MC_END();
9288 return VINF_SUCCESS;
9289
9290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9291 }
9292 }
9293 else
9294 {
9295 switch (pVCpu->iem.s.enmEffOpSize)
9296 {
9297 case IEMMODE_16BIT:
9298 IEM_MC_BEGIN(4, 2);
9299 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9300 IEM_MC_ARG(uint16_t, u16Src, 1);
9301 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9302 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9304
9305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9306 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9307 IEM_MC_ASSIGN(cShiftArg, cShift);
9308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9309 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9310 IEM_MC_FETCH_EFLAGS(EFlags);
9311 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9312 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9313
9314 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9315 IEM_MC_COMMIT_EFLAGS(EFlags);
9316 IEM_MC_ADVANCE_RIP();
9317 IEM_MC_END();
9318 return VINF_SUCCESS;
9319
9320 case IEMMODE_32BIT:
9321 IEM_MC_BEGIN(4, 2);
9322 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9323 IEM_MC_ARG(uint32_t, u32Src, 1);
9324 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9325 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9327
9328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9329 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9330 IEM_MC_ASSIGN(cShiftArg, cShift);
9331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9332 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9333 IEM_MC_FETCH_EFLAGS(EFlags);
9334 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9335 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9336
9337 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9338 IEM_MC_COMMIT_EFLAGS(EFlags);
9339 IEM_MC_ADVANCE_RIP();
9340 IEM_MC_END();
9341 return VINF_SUCCESS;
9342
9343 case IEMMODE_64BIT:
9344 IEM_MC_BEGIN(4, 2);
9345 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9346 IEM_MC_ARG(uint64_t, u64Src, 1);
9347 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9348 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9350
9351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9352 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9353 IEM_MC_ASSIGN(cShiftArg, cShift);
9354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9355 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9356 IEM_MC_FETCH_EFLAGS(EFlags);
9357 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9358 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9359
9360 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9361 IEM_MC_COMMIT_EFLAGS(EFlags);
9362 IEM_MC_ADVANCE_RIP();
9363 IEM_MC_END();
9364 return VINF_SUCCESS;
9365
9366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9367 }
9368 }
9369}
9370
9371
9372/**
9373 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9374 */
9375FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9376{
9377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9378 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9379
9380 if (IEM_IS_MODRM_REG_MODE(bRm))
9381 {
9382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9383
9384 switch (pVCpu->iem.s.enmEffOpSize)
9385 {
9386 case IEMMODE_16BIT:
9387 IEM_MC_BEGIN(4, 0);
9388 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9389 IEM_MC_ARG(uint16_t, u16Src, 1);
9390 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9391 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9392
9393 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9394 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9395 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9396 IEM_MC_REF_EFLAGS(pEFlags);
9397 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9398
9399 IEM_MC_ADVANCE_RIP();
9400 IEM_MC_END();
9401 return VINF_SUCCESS;
9402
9403 case IEMMODE_32BIT:
9404 IEM_MC_BEGIN(4, 0);
9405 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9406 IEM_MC_ARG(uint32_t, u32Src, 1);
9407 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9408 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9409
9410 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9411 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9412 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9413 IEM_MC_REF_EFLAGS(pEFlags);
9414 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9415
9416 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9417 IEM_MC_ADVANCE_RIP();
9418 IEM_MC_END();
9419 return VINF_SUCCESS;
9420
9421 case IEMMODE_64BIT:
9422 IEM_MC_BEGIN(4, 0);
9423 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9424 IEM_MC_ARG(uint64_t, u64Src, 1);
9425 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9426 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9427
9428 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9429 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9430 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9431 IEM_MC_REF_EFLAGS(pEFlags);
9432 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9433
9434 IEM_MC_ADVANCE_RIP();
9435 IEM_MC_END();
9436 return VINF_SUCCESS;
9437
9438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9439 }
9440 }
9441 else
9442 {
9443 switch (pVCpu->iem.s.enmEffOpSize)
9444 {
9445 case IEMMODE_16BIT:
9446 IEM_MC_BEGIN(4, 2);
9447 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9448 IEM_MC_ARG(uint16_t, u16Src, 1);
9449 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9450 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9452
9453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9455 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9456 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9457 IEM_MC_FETCH_EFLAGS(EFlags);
9458 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9459 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9460
9461 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9462 IEM_MC_COMMIT_EFLAGS(EFlags);
9463 IEM_MC_ADVANCE_RIP();
9464 IEM_MC_END();
9465 return VINF_SUCCESS;
9466
9467 case IEMMODE_32BIT:
9468 IEM_MC_BEGIN(4, 2);
9469 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9470 IEM_MC_ARG(uint32_t, u32Src, 1);
9471 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9472 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9474
9475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9477 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9478 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9479 IEM_MC_FETCH_EFLAGS(EFlags);
9480 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9481 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9482
9483 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9484 IEM_MC_COMMIT_EFLAGS(EFlags);
9485 IEM_MC_ADVANCE_RIP();
9486 IEM_MC_END();
9487 return VINF_SUCCESS;
9488
9489 case IEMMODE_64BIT:
9490 IEM_MC_BEGIN(4, 2);
9491 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9492 IEM_MC_ARG(uint64_t, u64Src, 1);
9493 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9494 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9496
9497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9499 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9500 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9501 IEM_MC_FETCH_EFLAGS(EFlags);
9502 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9503 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9504
9505 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9506 IEM_MC_COMMIT_EFLAGS(EFlags);
9507 IEM_MC_ADVANCE_RIP();
9508 IEM_MC_END();
9509 return VINF_SUCCESS;
9510
9511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9512 }
9513 }
9514}
9515
9516
9517
9518/** Opcode 0x0f 0xa4. */
9519FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9520{
9521 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9522 IEMOP_HLP_MIN_386();
9523 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9524}
9525
9526
9527/** Opcode 0x0f 0xa5. */
9528FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9529{
9530 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9531 IEMOP_HLP_MIN_386();
9532 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9533}
9534
9535
9536/** Opcode 0x0f 0xa8. */
9537FNIEMOP_DEF(iemOp_push_gs)
9538{
9539 IEMOP_MNEMONIC(push_gs, "push gs");
9540 IEMOP_HLP_MIN_386();
9541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9542 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9543}
9544
9545
9546/** Opcode 0x0f 0xa9. */
9547FNIEMOP_DEF(iemOp_pop_gs)
9548{
9549 IEMOP_MNEMONIC(pop_gs, "pop gs");
9550 IEMOP_HLP_MIN_386();
9551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9552 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9553}
9554
9555
9556/** Opcode 0x0f 0xaa. */
9557FNIEMOP_DEF(iemOp_rsm)
9558{
9559 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9560 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9562 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9563}
9564
9565
9566
9567/** Opcode 0x0f 0xab. */
9568FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9569{
9570 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9571 IEMOP_HLP_MIN_386();
9572 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
9573}
9574
9575
9576/** Opcode 0x0f 0xac. */
9577FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9578{
9579 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9580 IEMOP_HLP_MIN_386();
9581 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9582}
9583
9584
9585/** Opcode 0x0f 0xad. */
9586FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9587{
9588 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9589 IEMOP_HLP_MIN_386();
9590 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9591}
9592
9593
9594/** Opcode 0x0f 0xae mem/0. */
9595FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9596{
9597 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9598 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9599 return IEMOP_RAISE_INVALID_OPCODE();
9600
9601 IEM_MC_BEGIN(3, 1);
9602 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9603 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9604 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9607 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9608 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9609 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9610 IEM_MC_END();
9611 return VINF_SUCCESS;
9612}
9613
9614
9615/** Opcode 0x0f 0xae mem/1. */
9616FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9617{
9618 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9619 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9620 return IEMOP_RAISE_INVALID_OPCODE();
9621
9622 IEM_MC_BEGIN(3, 1);
9623 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9624 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9625 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9628 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9629 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9630 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9631 IEM_MC_END();
9632 return VINF_SUCCESS;
9633}
9634
9635
9636/**
9637 * @opmaps grp15
9638 * @opcode !11/2
9639 * @oppfx none
9640 * @opcpuid sse
9641 * @opgroup og_sse_mxcsrsm
9642 * @opxcpttype 5
9643 * @optest op1=0 -> mxcsr=0
9644 * @optest op1=0x2083 -> mxcsr=0x2083
9645 * @optest op1=0xfffffffe -> value.xcpt=0xd
9646 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9647 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9648 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9649 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9650 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9651 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9652 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9653 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9654 */
9655FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9656{
9657 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9658 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9659 return IEMOP_RAISE_INVALID_OPCODE();
9660
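/* The actual MXCSR load, including the #GP(0) raised when reserved bits
   are set (the 0xfffffffe test above), is left to iemCImpl_ldmxcsr. */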
9661 IEM_MC_BEGIN(2, 0);
9662 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9663 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9666 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9667 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9668 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9669 IEM_MC_END();
9670 return VINF_SUCCESS;
9671}
9672
9673
9674/**
9675 * @opmaps grp15
9676 * @opcode !11/3
9677 * @oppfx none
9678 * @opcpuid sse
9679 * @opgroup og_sse_mxcsrsm
9680 * @opxcpttype 5
9681 * @optest mxcsr=0 -> op1=0
9682 * @optest mxcsr=0x2083 -> op1=0x2083
9683 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9684 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9685 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9686 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9687 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9688 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9689 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9690 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9691 */
9692FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9693{
9694 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9695 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9696 return IEMOP_RAISE_INVALID_OPCODE();
9697
9698 IEM_MC_BEGIN(2, 0);
9699 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9700 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9703 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9704 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9705 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9706 IEM_MC_END();
9707 return VINF_SUCCESS;
9708}
9709
9710
9711/**
9712 * @opmaps grp15
9713 * @opcode !11/4
9714 * @oppfx none
9715 * @opcpuid xsave
9716 * @opgroup og_system
9717 * @opxcpttype none
9718 */
9719FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9720{
9721 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9722 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9723 return IEMOP_RAISE_INVALID_OPCODE();
9724
9725 IEM_MC_BEGIN(3, 0);
9726 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9727 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9728 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9731 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9732 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9733 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9734 IEM_MC_END();
9735 return VINF_SUCCESS;
9736}
9737
9738
9739/**
9740 * @opmaps grp15
9741 * @opcode !11/5
9742 * @oppfx none
9743 * @opcpuid xsave
9744 * @opgroup og_system
9745 * @opxcpttype none
9746 */
9747FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9748{
9749 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9750 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9751 return IEMOP_RAISE_INVALID_OPCODE();
9752
9753 IEM_MC_BEGIN(3, 0);
9754 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9755 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9756 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9759 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9760 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9761 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9762 IEM_MC_END();
9763 return VINF_SUCCESS;
9764}
9765
9766/** Opcode 0x0f 0xae mem/6. */
9767FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9768
9769/**
9770 * @opmaps grp15
9771 * @opcode !11/7
9772 * @oppfx none
9773 * @opcpuid clfsh
9774 * @opgroup og_cachectl
9775 * @optest op1=1 ->
9776 */
9777FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9778{
9779 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9780 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9781 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9782
9783 IEM_MC_BEGIN(2, 0);
9784 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9785 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9788 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9789 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9790 IEM_MC_END();
9791 return VINF_SUCCESS;
9792}
9793
9794/**
9795 * @opmaps grp15
9796 * @opcode !11/7
9797 * @oppfx 0x66
9798 * @opcpuid clflushopt
9799 * @opgroup og_cachectl
9800 * @optest op1=1 ->
9801 */
9802FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9803{
9804 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9805 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9806 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9807
9808 IEM_MC_BEGIN(2, 0);
9809 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9810 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9813 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9814 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9815 IEM_MC_END();
9816 return VINF_SUCCESS;
9817}
9818
9819
9820/** Opcode 0x0f 0xae 11b/5. */
9821FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9822{
9823 RT_NOREF_PV(bRm);
9824 IEMOP_MNEMONIC(lfence, "lfence");
9825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9826 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9827 return IEMOP_RAISE_INVALID_OPCODE();
9828
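/* Hosts without SSE2 cannot execute a real LFENCE, so on x86/AMD64 the
   code falls back to iemAImpl_alt_mem_fence (presumably some serialising
   locked memory operation); the ARM64 build unconditionally uses the
   native helper, which is assumed to provide an equivalent barrier. */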
9829 IEM_MC_BEGIN(0, 0);
9830#ifndef RT_ARCH_ARM64
9831 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9832#endif
9833 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9834#ifndef RT_ARCH_ARM64
9835 else
9836 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9837#endif
9838 IEM_MC_ADVANCE_RIP();
9839 IEM_MC_END();
9840 return VINF_SUCCESS;
9841}
9842
9843
9844/** Opcode 0x0f 0xae 11b/6. */
9845FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9846{
9847 RT_NOREF_PV(bRm);
9848 IEMOP_MNEMONIC(mfence, "mfence");
9849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9850 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9851 return IEMOP_RAISE_INVALID_OPCODE();
9852
9853 IEM_MC_BEGIN(0, 0);
9854#ifndef RT_ARCH_ARM64
9855 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9856#endif
9857 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9858#ifndef RT_ARCH_ARM64
9859 else
9860 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9861#endif
9862 IEM_MC_ADVANCE_RIP();
9863 IEM_MC_END();
9864 return VINF_SUCCESS;
9865}
9866
9867
9868/** Opcode 0x0f 0xae 11b/7. */
9869FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9870{
9871 RT_NOREF_PV(bRm);
9872 IEMOP_MNEMONIC(sfence, "sfence");
9873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9874 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9875 return IEMOP_RAISE_INVALID_OPCODE();
9876
9877 IEM_MC_BEGIN(0, 0);
9878#ifndef RT_ARCH_ARM64
9879 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9880#endif
9881 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9882#ifndef RT_ARCH_ARM64
9883 else
9884 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9885#endif
9886 IEM_MC_ADVANCE_RIP();
9887 IEM_MC_END();
9888 return VINF_SUCCESS;
9889}
9890
9891
9892/** Opcode 0xf3 0x0f 0xae 11b/0. */
9893FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9894{
9895 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9897 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9898 {
9899 IEM_MC_BEGIN(1, 0);
9900 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9901 IEM_MC_ARG(uint64_t, u64Dst, 0);
9902 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9903 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9904 IEM_MC_ADVANCE_RIP();
9905 IEM_MC_END();
9906 }
9907 else
9908 {
9909 IEM_MC_BEGIN(1, 0);
9910 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9911 IEM_MC_ARG(uint32_t, u32Dst, 0);
9912 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9913 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9914 IEM_MC_ADVANCE_RIP();
9915 IEM_MC_END();
9916 }
9917 return VINF_SUCCESS;
9918}
9919
9920
9921/** Opcode 0xf3 0x0f 0xae 11b/1. */
9922FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9923{
9924 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9926 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9927 {
9928 IEM_MC_BEGIN(1, 0);
9929 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9930 IEM_MC_ARG(uint64_t, u64Dst, 0);
9931 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9932 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9933 IEM_MC_ADVANCE_RIP();
9934 IEM_MC_END();
9935 }
9936 else
9937 {
9938 IEM_MC_BEGIN(1, 0);
9939 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9940 IEM_MC_ARG(uint32_t, u32Dst, 0);
9941 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9942 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9943 IEM_MC_ADVANCE_RIP();
9944 IEM_MC_END();
9945 }
9946 return VINF_SUCCESS;
9947}
9948
9949
9950/** Opcode 0xf3 0x0f 0xae 11b/2. */
9951FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9952{
9953 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
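/* A 64-bit operand replaces the whole FS base and must be canonical or
   #GP(0) is raised, hence the canonical check in the 64-bit path below;
   a 32-bit operand is zero extended and therefore always canonical. */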
9955 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9956 {
9957 IEM_MC_BEGIN(1, 0);
9958 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9959 IEM_MC_ARG(uint64_t, u64Dst, 0);
9960 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9961 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9962 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9963 IEM_MC_ADVANCE_RIP();
9964 IEM_MC_END();
9965 }
9966 else
9967 {
9968 IEM_MC_BEGIN(1, 0);
9969 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9970 IEM_MC_ARG(uint32_t, u32Dst, 0);
9971 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9972 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9973 IEM_MC_ADVANCE_RIP();
9974 IEM_MC_END();
9975 }
9976 return VINF_SUCCESS;
9977}
9978
9979
9980/** Opcode 0xf3 0x0f 0xae 11b/3. */
9981FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9982{
9983 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9985 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9986 {
9987 IEM_MC_BEGIN(1, 0);
9988 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9989 IEM_MC_ARG(uint64_t, u64Dst, 0);
9990 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9991 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9992 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9993 IEM_MC_ADVANCE_RIP();
9994 IEM_MC_END();
9995 }
9996 else
9997 {
9998 IEM_MC_BEGIN(1, 0);
9999 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10000 IEM_MC_ARG(uint32_t, u32Dst, 0);
10001 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10002 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10003 IEM_MC_ADVANCE_RIP();
10004 IEM_MC_END();
10005 }
10006 return VINF_SUCCESS;
10007}
10008
10009
10010/**
10011 * Group 15 jump table for register variant.
10012 */
10013IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10014{ /* pfx: none, 066h, 0f3h, 0f2h */
10015 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10016 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10017 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10018 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10019 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10020 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10021 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10022 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10023};
10024AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10025
10026
10027/**
10028 * Group 15 jump table for memory variant.
10029 */
10030IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10031{ /* pfx: none, 066h, 0f3h, 0f2h */
10032 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10033 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10034 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10035 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10036 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10037 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10038 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10039 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10040};
10041AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
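/*
 * Both tables are indexed by (modrm.reg << 2) | prefix, where the prefix
 * index is: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 (pVCpu->iem.s.idxPrefix).
 * E.g. 0xf3 0x0f 0xae with modrm.reg=0 and a register operand lands on
 * iemOp_Grp15_rdfsbase via g_apfnGroup15RegReg[0 * 4 + 2].
 */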
10042
10043
10044/** Opcode 0x0f 0xae. */
10045FNIEMOP_DEF(iemOp_Grp15)
10046{
10047 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10048 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10049 if (IEM_IS_MODRM_REG_MODE(bRm))
10050 /* register, register */
10051 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10052 + pVCpu->iem.s.idxPrefix], bRm);
10053 /* memory, register */
10054 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10055 + pVCpu->iem.s.idxPrefix], bRm);
10056}
10057
10058
10059/** Opcode 0x0f 0xaf. */
10060FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10061{
10062 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10063 IEMOP_HLP_MIN_386();
10064 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10065 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
10066}
10067
10068
10069/** Opcode 0x0f 0xb0. */
10070FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10071{
10072 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10073 IEMOP_HLP_MIN_486();
10074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10075
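/*
 * CMPXCHG r/m8, r8: AL is compared with the destination; on a match ZF is
 * set and the source byte is written to the destination, otherwise ZF is
 * cleared and the destination byte is loaded into AL.  The assembly helper
 * performs the compare, the conditional writes and the EFLAGS update in
 * one call, for both branches below.
 */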
10076 if (IEM_IS_MODRM_REG_MODE(bRm))
10077 {
10078 IEMOP_HLP_DONE_DECODING();
10079 IEM_MC_BEGIN(4, 0);
10080 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10081 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10082 IEM_MC_ARG(uint8_t, u8Src, 2);
10083 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10084
10085 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10086 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10087 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10088 IEM_MC_REF_EFLAGS(pEFlags);
10089 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10090 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10091 else
10092 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10093
10094 IEM_MC_ADVANCE_RIP();
10095 IEM_MC_END();
10096 }
10097 else
10098 {
10099 IEM_MC_BEGIN(4, 3);
10100 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10101 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10102 IEM_MC_ARG(uint8_t, u8Src, 2);
10103 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10105 IEM_MC_LOCAL(uint8_t, u8Al);
10106
10107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10108 IEMOP_HLP_DONE_DECODING();
10109 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10110 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10111 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10112 IEM_MC_FETCH_EFLAGS(EFlags);
10113 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10114 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10115 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10116 else
10117 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10118
10119 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10120 IEM_MC_COMMIT_EFLAGS(EFlags);
10121 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10122 IEM_MC_ADVANCE_RIP();
10123 IEM_MC_END();
10124 }
10125 return VINF_SUCCESS;
10126}
10127
10128/** Opcode 0x0f 0xb1. */
10129FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10130{
10131 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10132 IEMOP_HLP_MIN_486();
10133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10134
10135 if (IEM_IS_MODRM_REG_MODE(bRm))
10136 {
10137 IEMOP_HLP_DONE_DECODING();
10138 switch (pVCpu->iem.s.enmEffOpSize)
10139 {
10140 case IEMMODE_16BIT:
10141 IEM_MC_BEGIN(4, 0);
10142 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10143 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10144 IEM_MC_ARG(uint16_t, u16Src, 2);
10145 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10146
10147 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10148 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10149 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10150 IEM_MC_REF_EFLAGS(pEFlags);
10151 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10152 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10153 else
10154 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10155
10156 IEM_MC_ADVANCE_RIP();
10157 IEM_MC_END();
10158 return VINF_SUCCESS;
10159
10160 case IEMMODE_32BIT:
10161 IEM_MC_BEGIN(4, 0);
10162 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10163 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10164 IEM_MC_ARG(uint32_t, u32Src, 2);
10165 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10166
10167 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10168 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10169 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10170 IEM_MC_REF_EFLAGS(pEFlags);
10171 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10172 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10173 else
10174 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10175
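/* A 32-bit register write zero extends to 64 bits, so only the register
   the instruction actually wrote (the destination on a match, EAX on a
   mismatch) gets its high half cleared here. */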
10176 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10177 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10178 } IEM_MC_ELSE() {
10179 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10180 } IEM_MC_ENDIF();
10181
10182 IEM_MC_ADVANCE_RIP();
10183 IEM_MC_END();
10184 return VINF_SUCCESS;
10185
10186 case IEMMODE_64BIT:
10187 IEM_MC_BEGIN(4, 0);
10188 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10189 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10190#ifdef RT_ARCH_X86
10191 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10192#else
10193 IEM_MC_ARG(uint64_t, u64Src, 2);
10194#endif
10195 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10196
10197 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10198 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10199 IEM_MC_REF_EFLAGS(pEFlags);
10200#ifdef RT_ARCH_X86
10201 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10202 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10203 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10204 else
10205 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10206#else
10207 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10209 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10210 else
10211 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10212#endif
10213
10214 IEM_MC_ADVANCE_RIP();
10215 IEM_MC_END();
10216 return VINF_SUCCESS;
10217
10218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10219 }
10220 }
10221 else
10222 {
10223 switch (pVCpu->iem.s.enmEffOpSize)
10224 {
10225 case IEMMODE_16BIT:
10226 IEM_MC_BEGIN(4, 3);
10227 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10228 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10229 IEM_MC_ARG(uint16_t, u16Src, 2);
10230 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10232 IEM_MC_LOCAL(uint16_t, u16Ax);
10233
10234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10235 IEMOP_HLP_DONE_DECODING();
10236 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10237 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10238 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10239 IEM_MC_FETCH_EFLAGS(EFlags);
10240 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10241 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10242 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10243 else
10244 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10245
10246 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10247 IEM_MC_COMMIT_EFLAGS(EFlags);
10248 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10249 IEM_MC_ADVANCE_RIP();
10250 IEM_MC_END();
10251 return VINF_SUCCESS;
10252
10253 case IEMMODE_32BIT:
10254 IEM_MC_BEGIN(4, 3);
10255 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10256 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10257 IEM_MC_ARG(uint32_t, u32Src, 2);
10258 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10260 IEM_MC_LOCAL(uint32_t, u32Eax);
10261
10262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10263 IEMOP_HLP_DONE_DECODING();
10264 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10265 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10266 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10267 IEM_MC_FETCH_EFLAGS(EFlags);
10268 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10269 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10270 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10271 else
10272 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10273
10274 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10275 IEM_MC_COMMIT_EFLAGS(EFlags);
10276 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10277 IEM_MC_ADVANCE_RIP();
10278 IEM_MC_END();
10279 return VINF_SUCCESS;
10280
10281 case IEMMODE_64BIT:
10282 IEM_MC_BEGIN(4, 3);
10283 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10284 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10285#ifdef RT_ARCH_X86
10286 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10287#else
10288 IEM_MC_ARG(uint64_t, u64Src, 2);
10289#endif
10290 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10292 IEM_MC_LOCAL(uint64_t, u64Rax);
10293
10294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10295 IEMOP_HLP_DONE_DECODING();
10296 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10297 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10298 IEM_MC_FETCH_EFLAGS(EFlags);
10299 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10300#ifdef RT_ARCH_X86
10301 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10302 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10303 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10304 else
10305 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10306#else
10307 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10308 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10309 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10310 else
10311 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10312#endif
10313
10314 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10315 IEM_MC_COMMIT_EFLAGS(EFlags);
10316 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10317 IEM_MC_ADVANCE_RIP();
10318 IEM_MC_END();
10319 return VINF_SUCCESS;
10320
10321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10322 }
10323 }
10324}
10325
10326
10327FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
10328{
10329 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
10330 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
10331
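/* The far pointer is laid out in memory as the offset (2, 4 or 8 bytes,
   by operand size) followed by the 16-bit selector, hence the selector
   fetches below use displacements of 2, 4 and 8 respectively. */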
10332 switch (pVCpu->iem.s.enmEffOpSize)
10333 {
10334 case IEMMODE_16BIT:
10335 IEM_MC_BEGIN(5, 1);
10336 IEM_MC_ARG(uint16_t, uSel, 0);
10337 IEM_MC_ARG(uint16_t, offSeg, 1);
10338 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10339 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10340 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10341 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10344 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10345 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
10346 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10347 IEM_MC_END();
10348 return VINF_SUCCESS;
10349
10350 case IEMMODE_32BIT:
10351 IEM_MC_BEGIN(5, 1);
10352 IEM_MC_ARG(uint16_t, uSel, 0);
10353 IEM_MC_ARG(uint32_t, offSeg, 1);
10354 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10355 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10356 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10357 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10360 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10361 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
10362 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10363 IEM_MC_END();
10364 return VINF_SUCCESS;
10365
10366 case IEMMODE_64BIT:
10367 IEM_MC_BEGIN(5, 1);
10368 IEM_MC_ARG(uint16_t, uSel, 0);
10369 IEM_MC_ARG(uint64_t, offSeg, 1);
10370 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10371 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10372 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10373 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10374 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10376 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
10377 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10378 else
10379 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10380 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
10381 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10382 IEM_MC_END();
10383 return VINF_SUCCESS;
10384
10385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10386 }
10387}
10388
10389
10390/** Opcode 0x0f 0xb2. */
10391FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10392{
10393 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10394 IEMOP_HLP_MIN_386();
10395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10396 if (IEM_IS_MODRM_REG_MODE(bRm))
10397 return IEMOP_RAISE_INVALID_OPCODE();
10398 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10399}
10400
10401
10402/** Opcode 0x0f 0xb3. */
10403FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10404{
10405 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10406 IEMOP_HLP_MIN_386();
10407 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
10408}
10409
10410
10411/** Opcode 0x0f 0xb4. */
10412FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10413{
10414 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10415 IEMOP_HLP_MIN_386();
10416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10417 if (IEM_IS_MODRM_REG_MODE(bRm))
10418 return IEMOP_RAISE_INVALID_OPCODE();
10419 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10420}
10421
10422
10423/** Opcode 0x0f 0xb5. */
10424FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10425{
10426 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10427 IEMOP_HLP_MIN_386();
10428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10429 if (IEM_IS_MODRM_REG_MODE(bRm))
10430 return IEMOP_RAISE_INVALID_OPCODE();
10431 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10432}
10433
10434
10435/** Opcode 0x0f 0xb6. */
10436FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10437{
10438 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10439 IEMOP_HLP_MIN_386();
10440
10441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10442
10443 /*
10444 * If rm denotes a register, there are no more instruction bytes.
10445 */
10446 if (IEM_IS_MODRM_REG_MODE(bRm))
10447 {
10448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10449 switch (pVCpu->iem.s.enmEffOpSize)
10450 {
10451 case IEMMODE_16BIT:
10452 IEM_MC_BEGIN(0, 1);
10453 IEM_MC_LOCAL(uint16_t, u16Value);
10454 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10455 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10456 IEM_MC_ADVANCE_RIP();
10457 IEM_MC_END();
10458 return VINF_SUCCESS;
10459
10460 case IEMMODE_32BIT:
10461 IEM_MC_BEGIN(0, 1);
10462 IEM_MC_LOCAL(uint32_t, u32Value);
10463 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10464 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10465 IEM_MC_ADVANCE_RIP();
10466 IEM_MC_END();
10467 return VINF_SUCCESS;
10468
10469 case IEMMODE_64BIT:
10470 IEM_MC_BEGIN(0, 1);
10471 IEM_MC_LOCAL(uint64_t, u64Value);
10472 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10473 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10474 IEM_MC_ADVANCE_RIP();
10475 IEM_MC_END();
10476 return VINF_SUCCESS;
10477
10478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10479 }
10480 }
10481 else
10482 {
10483 /*
10484 * We're loading a register from memory.
10485 */
10486 switch (pVCpu->iem.s.enmEffOpSize)
10487 {
10488 case IEMMODE_16BIT:
10489 IEM_MC_BEGIN(0, 2);
10490 IEM_MC_LOCAL(uint16_t, u16Value);
10491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10494 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10495 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10496 IEM_MC_ADVANCE_RIP();
10497 IEM_MC_END();
10498 return VINF_SUCCESS;
10499
10500 case IEMMODE_32BIT:
10501 IEM_MC_BEGIN(0, 2);
10502 IEM_MC_LOCAL(uint32_t, u32Value);
10503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10506 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10507 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10508 IEM_MC_ADVANCE_RIP();
10509 IEM_MC_END();
10510 return VINF_SUCCESS;
10511
10512 case IEMMODE_64BIT:
10513 IEM_MC_BEGIN(0, 2);
10514 IEM_MC_LOCAL(uint64_t, u64Value);
10515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10516 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10518 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10519 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10520 IEM_MC_ADVANCE_RIP();
10521 IEM_MC_END();
10522 return VINF_SUCCESS;
10523
10524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10525 }
10526 }
10527}
10528
10529
10530/** Opcode 0x0f 0xb7. */
10531FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10532{
10533 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10534 IEMOP_HLP_MIN_386();
10535
10536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10537
10538 /** @todo Not entirely sure how the operand size prefix is handled here,
10539 * assuming that it will be ignored. Would be nice to have a few
10540 * tests for this. */
10541 /*
10542 * If rm denotes a register, there are no more instruction bytes.
10543 */
10544 if (IEM_IS_MODRM_REG_MODE(bRm))
10545 {
10546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10547 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10548 {
10549 IEM_MC_BEGIN(0, 1);
10550 IEM_MC_LOCAL(uint32_t, u32Value);
10551 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10552 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10553 IEM_MC_ADVANCE_RIP();
10554 IEM_MC_END();
10555 }
10556 else
10557 {
10558 IEM_MC_BEGIN(0, 1);
10559 IEM_MC_LOCAL(uint64_t, u64Value);
10560 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10561 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10562 IEM_MC_ADVANCE_RIP();
10563 IEM_MC_END();
10564 }
10565 }
10566 else
10567 {
10568 /*
10569 * We're loading a register from memory.
10570 */
10571 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10572 {
10573 IEM_MC_BEGIN(0, 2);
10574 IEM_MC_LOCAL(uint32_t, u32Value);
10575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10578 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10579 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10580 IEM_MC_ADVANCE_RIP();
10581 IEM_MC_END();
10582 }
10583 else
10584 {
10585 IEM_MC_BEGIN(0, 2);
10586 IEM_MC_LOCAL(uint64_t, u64Value);
10587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10590 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10591 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10592 IEM_MC_ADVANCE_RIP();
10593 IEM_MC_END();
10594 }
10595 }
10596 return VINF_SUCCESS;
10597}
10598
10599
10600/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10601FNIEMOP_UD_STUB(iemOp_jmpe);
10602
10603
10604/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10605FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10606{
10607 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10608 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10609 return iemOp_InvalidNeedRM(pVCpu);
10610#ifndef TST_IEM_CHECK_MC
10611# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10612 static const IEMOPBINSIZES s_Native =
10613 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10614# endif
10615 static const IEMOPBINSIZES s_Fallback =
10616 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10617#endif
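/* IEM_SELECT_HOST_OR_FALLBACK presumably picks s_Native when the host CPU
   has POPCNT (fPopCnt) and s_Fallback otherwise; on hosts without the
   assembly helpers only the C fallbacks are available. */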
10618 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
10619}
10620
10621
10622/**
10623 * @opcode 0xb9
10624 * @opinvalid intel-modrm
10625 * @optest ->
10626 */
10627FNIEMOP_DEF(iemOp_Grp10)
10628{
10629 /*
10630 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
10631 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10632 */
10633 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10634 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10635 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10636}
10637
10638
10639/** Opcode 0x0f 0xba. */
10640FNIEMOP_DEF(iemOp_Grp8)
10641{
10642 IEMOP_HLP_MIN_386();
10643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10644 PCIEMOPBINSIZES pImpl;
10645 switch (IEM_GET_MODRM_REG_8(bRm))
10646 {
10647 case 0: case 1: case 2: case 3:
10648 /* Both AMD and Intel want full modr/m decoding and imm8. */
10649 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10650 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
10651 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
10652 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
10653 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
10654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10655 }
10656 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10657
10658 if (IEM_IS_MODRM_REG_MODE(bRm))
10659 {
10660 /* register destination. */
10661 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10663
10664 switch (pVCpu->iem.s.enmEffOpSize)
10665 {
10666 case IEMMODE_16BIT:
10667 IEM_MC_BEGIN(3, 0);
10668 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10669 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
10670 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10671
10672 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10673 IEM_MC_REF_EFLAGS(pEFlags);
10674 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10675
10676 IEM_MC_ADVANCE_RIP();
10677 IEM_MC_END();
10678 return VINF_SUCCESS;
10679
10680 case IEMMODE_32BIT:
10681 IEM_MC_BEGIN(3, 0);
10682 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10683 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
10684 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10685
10686 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10687 IEM_MC_REF_EFLAGS(pEFlags);
10688 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10689
10690 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10691 IEM_MC_ADVANCE_RIP();
10692 IEM_MC_END();
10693 return VINF_SUCCESS;
10694
10695 case IEMMODE_64BIT:
10696 IEM_MC_BEGIN(3, 0);
10697 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10698 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
10699 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10700
10701 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10702 IEM_MC_REF_EFLAGS(pEFlags);
10703 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10704
10705 IEM_MC_ADVANCE_RIP();
10706 IEM_MC_END();
10707 return VINF_SUCCESS;
10708
10709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10710 }
10711 }
10712 else
10713 {
10714 /* memory destination. */
10715
10716 uint32_t fAccess;
10717 if (pImpl->pfnLockedU16)
10718 fAccess = IEM_ACCESS_DATA_RW;
10719 else /* BT */
10720 fAccess = IEM_ACCESS_DATA_R;
10721
10722 /** @todo test negative bit offsets! */
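/* Unlike the Gv forms, an immediate bit offset never adjusts the effective
   address; it is simply masked to the operand width below (& 0x0f/0x1f/0x3f).
   E.g. 'bt word [mem], 0x14' tests bit 0x14 & 0xf = 4 of the word at mem. */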
10723 switch (pVCpu->iem.s.enmEffOpSize)
10724 {
10725 case IEMMODE_16BIT:
10726 IEM_MC_BEGIN(3, 1);
10727 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10728 IEM_MC_ARG(uint16_t, u16Src, 1);
10729 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10731
10732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10733 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10734 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
10735 if (pImpl->pfnLockedU16)
10736 IEMOP_HLP_DONE_DECODING();
10737 else
10738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10739 IEM_MC_FETCH_EFLAGS(EFlags);
10740 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10741 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10742 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10743 else
10744 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10745 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10746
10747 IEM_MC_COMMIT_EFLAGS(EFlags);
10748 IEM_MC_ADVANCE_RIP();
10749 IEM_MC_END();
10750 return VINF_SUCCESS;
10751
10752 case IEMMODE_32BIT:
10753 IEM_MC_BEGIN(3, 1);
10754 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10755 IEM_MC_ARG(uint32_t, u32Src, 1);
10756 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10758
10759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10760 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10761 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
10762 if (pImpl->pfnLockedU16)
10763 IEMOP_HLP_DONE_DECODING();
10764 else
10765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10766 IEM_MC_FETCH_EFLAGS(EFlags);
10767 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10768 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10769 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10770 else
10771 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10772 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10773
10774 IEM_MC_COMMIT_EFLAGS(EFlags);
10775 IEM_MC_ADVANCE_RIP();
10776 IEM_MC_END();
10777 return VINF_SUCCESS;
10778
10779 case IEMMODE_64BIT:
10780 IEM_MC_BEGIN(3, 1);
10781 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10782 IEM_MC_ARG(uint64_t, u64Src, 1);
10783 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10785
10786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10787 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10788 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
10789 if (pImpl->pfnLockedU16)
10790 IEMOP_HLP_DONE_DECODING();
10791 else
10792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10793 IEM_MC_FETCH_EFLAGS(EFlags);
10794 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10795 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10796 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10797 else
10798 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10799 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10800
10801 IEM_MC_COMMIT_EFLAGS(EFlags);
10802 IEM_MC_ADVANCE_RIP();
10803 IEM_MC_END();
10804 return VINF_SUCCESS;
10805
10806 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10807 }
10808 }
10809}
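
/* Illustrative sketch, not used by the emulation (the helper name below is
   made up): the immediate forms above mask the bit offset to the operand
   width (0x0f/0x1f/0x3f), so a plain C model of the 32-bit BT variant is: */
#if 0
static bool iemExampleBt32(uint32_t uDst, uint8_t u8Bit)
{
    /* The offset wraps within the operand, matching 'u8Bit & 0x1f' above;
       the tested bit lands in CF and the destination is never written. */
    return (uDst >> (u8Bit & 0x1f)) & 1;
}
#endif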
10810
10811
10812/** Opcode 0x0f 0xbb. */
10813FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10814{
10815 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10816 IEMOP_HLP_MIN_386();
10817 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
10818}
10819
10820
10821/**
10822 * Common worker for BSF and BSR instructions.
10823 *
10824 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10825 * the destination register, which means that for 32-bit operations the high
10826 * bits must be left alone.
10827 *
10828 * @param pImpl Pointer to the instruction implementation (assembly).
10829 */
10830FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10831{
10832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10833
10834 /*
10835 * If rm is denoting a register, no more instruction bytes.
10836 */
10837 if (IEM_IS_MODRM_REG_MODE(bRm))
10838 {
10839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10840 switch (pVCpu->iem.s.enmEffOpSize)
10841 {
10842 case IEMMODE_16BIT:
10843 IEM_MC_BEGIN(3, 0);
10844 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10845 IEM_MC_ARG(uint16_t, u16Src, 1);
10846 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10847
10848 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10849 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10850 IEM_MC_REF_EFLAGS(pEFlags);
10851 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10852
10853 IEM_MC_ADVANCE_RIP();
10854 IEM_MC_END();
10855 break;
10856
10857 case IEMMODE_32BIT:
10858 IEM_MC_BEGIN(3, 0);
10859 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10860 IEM_MC_ARG(uint32_t, u32Src, 1);
10861 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10862
10863 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10864 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10865 IEM_MC_REF_EFLAGS(pEFlags);
10866 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10867 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10868 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10869 IEM_MC_ENDIF();
10870 IEM_MC_ADVANCE_RIP();
10871 IEM_MC_END();
10872 break;
10873
10874 case IEMMODE_64BIT:
10875 IEM_MC_BEGIN(3, 0);
10876 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10877 IEM_MC_ARG(uint64_t, u64Src, 1);
10878 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10879
10880 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10881 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10882 IEM_MC_REF_EFLAGS(pEFlags);
10883 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10884
10885 IEM_MC_ADVANCE_RIP();
10886 IEM_MC_END();
10887 break;
10888 }
10889 }
10890 else
10891 {
10892 /*
10893 * We're accessing memory.
10894 */
10895 switch (pVCpu->iem.s.enmEffOpSize)
10896 {
10897 case IEMMODE_16BIT:
10898 IEM_MC_BEGIN(3, 1);
10899 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10900 IEM_MC_ARG(uint16_t, u16Src, 1);
10901 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10903
10904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10906 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10907 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10908 IEM_MC_REF_EFLAGS(pEFlags);
10909 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10910
10911 IEM_MC_ADVANCE_RIP();
10912 IEM_MC_END();
10913 break;
10914
10915 case IEMMODE_32BIT:
10916 IEM_MC_BEGIN(3, 1);
10917 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10918 IEM_MC_ARG(uint32_t, u32Src, 1);
10919 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10921
10922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10924 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10925 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10926 IEM_MC_REF_EFLAGS(pEFlags);
10927 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10928
10929 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10930 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10931 IEM_MC_ENDIF();
10932 IEM_MC_ADVANCE_RIP();
10933 IEM_MC_END();
10934 break;
10935
10936 case IEMMODE_64BIT:
10937 IEM_MC_BEGIN(3, 1);
10938 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10939 IEM_MC_ARG(uint64_t, u64Src, 1);
10940 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10942
10943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10945 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10946 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10947 IEM_MC_REF_EFLAGS(pEFlags);
10948 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10949
10950 IEM_MC_ADVANCE_RIP();
10951 IEM_MC_END();
10952 break;
10953 }
10954 }
10955 return VINF_SUCCESS;
10956}
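
/* Illustrative sketch, not part of the emulation (helper name made up): a
   plain C model of the BSF register variant handled above.  A zero source
   sets ZF and leaves the destination untouched, which is why the 32-bit
   case only clears the high dword when ZF ends up clear. */
#if 0
static void iemExampleBsf32(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
{
    if (!uSrc)
        *pfEFlags |= X86_EFL_ZF;            /* destination deliberately not written */
    else
    {
        uint32_t iBit = 0;
        while (!(uSrc & RT_BIT_32(iBit)))   /* scan from the least significant bit */
            iBit++;
        *puDst     = iBit;
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
    }
}
#endif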
10957
10958
10959/** Opcode 0x0f 0xbc. */
10960FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
10961{
10962 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
10963 IEMOP_HLP_MIN_386();
10964 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10965 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
10966}
10967
10968
10969/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
10970FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
10971{
10972 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10973 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
10974 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10975
10976#ifndef TST_IEM_CHECK_MC
10977 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
10978 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
10979 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
10980 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
10981 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
10982 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
10983 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
10984 {
10985 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
10986 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
10987 };
10988#endif
10989 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10990 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10991 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10992}
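
/* Illustrative sketch, not the assembly worker (helper name made up): unlike
   BSF, TZCNT always writes the destination - a zero source yields the
   operand width and CF=1, while a zero result sets ZF.  The remaining
   status flags differ per CPU vendor, hence the variant tables above. */
#if 0
static uint32_t iemExampleTzCnt32(uint32_t uSrc, uint32_t *pfEFlags)
{
    uint32_t cTrailingZeros = 0;
    while (cTrailingZeros < 32 && !(uSrc & RT_BIT_32(cTrailingZeros)))
        cTrailingZeros++;
    *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_ZF);
    if (!uSrc)
        *pfEFlags |= X86_EFL_CF;            /* source was all zeros */
    if (!cTrailingZeros)
        *pfEFlags |= X86_EFL_ZF;            /* result is zero (bit 0 was set) */
    return cTrailingZeros;
}
#endif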
10993
10994
10995/** Opcode 0x0f 0xbd. */
10996FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
10997{
10998 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
10999 IEMOP_HLP_MIN_386();
11000 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11001 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11002}
11003
11004
11005/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11006FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11007{
11008 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11009 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11010 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11011
11012#ifndef TST_IEM_CHECK_MC
11013 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11014 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11015 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11016 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11017 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11018 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11019 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11020 {
11021 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11022 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11023 };
11024#endif
11025 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11026 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
11027 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
11028}
11029
11030
11031
11032/** Opcode 0x0f 0xbe. */
11033FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11034{
11035 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11036 IEMOP_HLP_MIN_386();
11037
11038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11039
11040 /*
11041 * If rm is denoting a register, no more instruction bytes.
11042 */
11043 if (IEM_IS_MODRM_REG_MODE(bRm))
11044 {
11045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11046 switch (pVCpu->iem.s.enmEffOpSize)
11047 {
11048 case IEMMODE_16BIT:
11049 IEM_MC_BEGIN(0, 1);
11050 IEM_MC_LOCAL(uint16_t, u16Value);
11051 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11052 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11053 IEM_MC_ADVANCE_RIP();
11054 IEM_MC_END();
11055 return VINF_SUCCESS;
11056
11057 case IEMMODE_32BIT:
11058 IEM_MC_BEGIN(0, 1);
11059 IEM_MC_LOCAL(uint32_t, u32Value);
11060 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11061 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11062 IEM_MC_ADVANCE_RIP();
11063 IEM_MC_END();
11064 return VINF_SUCCESS;
11065
11066 case IEMMODE_64BIT:
11067 IEM_MC_BEGIN(0, 1);
11068 IEM_MC_LOCAL(uint64_t, u64Value);
11069 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11070 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11071 IEM_MC_ADVANCE_RIP();
11072 IEM_MC_END();
11073 return VINF_SUCCESS;
11074
11075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11076 }
11077 }
11078 else
11079 {
11080 /*
11081 * We're loading a register from memory.
11082 */
11083 switch (pVCpu->iem.s.enmEffOpSize)
11084 {
11085 case IEMMODE_16BIT:
11086 IEM_MC_BEGIN(0, 2);
11087 IEM_MC_LOCAL(uint16_t, u16Value);
11088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11091 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11092 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11093 IEM_MC_ADVANCE_RIP();
11094 IEM_MC_END();
11095 return VINF_SUCCESS;
11096
11097 case IEMMODE_32BIT:
11098 IEM_MC_BEGIN(0, 2);
11099 IEM_MC_LOCAL(uint32_t, u32Value);
11100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11103 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11104 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11105 IEM_MC_ADVANCE_RIP();
11106 IEM_MC_END();
11107 return VINF_SUCCESS;
11108
11109 case IEMMODE_64BIT:
11110 IEM_MC_BEGIN(0, 2);
11111 IEM_MC_LOCAL(uint64_t, u64Value);
11112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11115 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11116 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11117 IEM_MC_ADVANCE_RIP();
11118 IEM_MC_END();
11119 return VINF_SUCCESS;
11120
11121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11122 }
11123 }
11124}
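
/* Illustrative sketch (helper name made up): MOVSX replicates the source's
   sign bit into all higher destination bits, so 0x80 widens to 0xffffff80.
   In C this falls out of a signed cast chain: */
#if 0
static uint32_t iemExampleMovSxU8ToU32(uint8_t u8Src)
{
    return (uint32_t)(int32_t)(int8_t)u8Src; /* sign-extend via the signed types */
}
#endif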
11125
11126
11127/** Opcode 0x0f 0xbf. */
11128FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11129{
11130 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11131 IEMOP_HLP_MIN_386();
11132
11133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11134
11135 /** @todo Not entirely sure how the operand size prefix is handled here,
11136 * assuming that it will be ignored. Would be nice to have a few
11137 * tests for this. */
11138 /*
11139 * If rm is denoting a register, no more instruction bytes.
11140 */
11141 if (IEM_IS_MODRM_REG_MODE(bRm))
11142 {
11143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11144 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11145 {
11146 IEM_MC_BEGIN(0, 1);
11147 IEM_MC_LOCAL(uint32_t, u32Value);
11148 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11149 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11150 IEM_MC_ADVANCE_RIP();
11151 IEM_MC_END();
11152 }
11153 else
11154 {
11155 IEM_MC_BEGIN(0, 1);
11156 IEM_MC_LOCAL(uint64_t, u64Value);
11157 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11158 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11159 IEM_MC_ADVANCE_RIP();
11160 IEM_MC_END();
11161 }
11162 }
11163 else
11164 {
11165 /*
11166 * We're loading a register from memory.
11167 */
11168 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11169 {
11170 IEM_MC_BEGIN(0, 2);
11171 IEM_MC_LOCAL(uint32_t, u32Value);
11172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11175 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11176 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11177 IEM_MC_ADVANCE_RIP();
11178 IEM_MC_END();
11179 }
11180 else
11181 {
11182 IEM_MC_BEGIN(0, 2);
11183 IEM_MC_LOCAL(uint64_t, u64Value);
11184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11187 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11188 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11189 IEM_MC_ADVANCE_RIP();
11190 IEM_MC_END();
11191 }
11192 }
11193 return VINF_SUCCESS;
11194}
11195
11196
11197/** Opcode 0x0f 0xc0. */
11198FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11199{
11200 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11201 IEMOP_HLP_MIN_486();
11202 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11203
11204 /*
11205 * If rm is denoting a register, no more instruction bytes.
11206 */
11207 if (IEM_IS_MODRM_REG_MODE(bRm))
11208 {
11209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11210
11211 IEM_MC_BEGIN(3, 0);
11212 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11213 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11214 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11215
11216 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11217 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11218 IEM_MC_REF_EFLAGS(pEFlags);
11219 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11220
11221 IEM_MC_ADVANCE_RIP();
11222 IEM_MC_END();
11223 }
11224 else
11225 {
11226 /*
11227 * We're accessing memory.
11228 */
11229 IEM_MC_BEGIN(3, 3);
11230 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11231 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11232 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11233 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11235
11236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11237 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11238 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11239 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11240 IEM_MC_FETCH_EFLAGS(EFlags);
11241 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11242 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11243 else
11244 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11245
11246 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
11247 IEM_MC_COMMIT_EFLAGS(EFlags);
11248 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11249 IEM_MC_ADVANCE_RIP();
11250 IEM_MC_END();
11251 return VINF_SUCCESS;
11252 }
11253 return VINF_SUCCESS;
11254}
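
/* Illustrative sketch, not the assembly worker (helper name made up, EFLAGS
   updating omitted - the flags follow ADD): XADD exchanges first and then
   adds, i.e. the register operand receives the old destination value and
   the destination the sum, which is why the memory form above operates on a
   copy of the register and stores it back after the call. */
#if 0
static void iemExampleXAdd8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8OldDst = *pu8Dst;
    *pu8Dst = u8OldDst + *pu8Reg;           /* destination = sum */
    *pu8Reg = u8OldDst;                     /* register = old destination */
}
#endif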
11255
11256
11257/** Opcode 0x0f 0xc1. */
11258FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11259{
11260 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11261 IEMOP_HLP_MIN_486();
11262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11263
11264 /*
11265 * If rm is denoting a register, no more instruction bytes.
11266 */
11267 if (IEM_IS_MODRM_REG_MODE(bRm))
11268 {
11269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11270
11271 switch (pVCpu->iem.s.enmEffOpSize)
11272 {
11273 case IEMMODE_16BIT:
11274 IEM_MC_BEGIN(3, 0);
11275 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11276 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11277 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11278
11279 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11280 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11281 IEM_MC_REF_EFLAGS(pEFlags);
11282 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11283
11284 IEM_MC_ADVANCE_RIP();
11285 IEM_MC_END();
11286 return VINF_SUCCESS;
11287
11288 case IEMMODE_32BIT:
11289 IEM_MC_BEGIN(3, 0);
11290 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11291 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11292 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11293
11294 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11295 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11296 IEM_MC_REF_EFLAGS(pEFlags);
11297 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11298
11299 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11300 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11301 IEM_MC_ADVANCE_RIP();
11302 IEM_MC_END();
11303 return VINF_SUCCESS;
11304
11305 case IEMMODE_64BIT:
11306 IEM_MC_BEGIN(3, 0);
11307 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11308 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11309 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11310
11311 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11312 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11313 IEM_MC_REF_EFLAGS(pEFlags);
11314 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11315
11316 IEM_MC_ADVANCE_RIP();
11317 IEM_MC_END();
11318 return VINF_SUCCESS;
11319
11320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11321 }
11322 }
11323 else
11324 {
11325 /*
11326 * We're accessing memory.
11327 */
11328 switch (pVCpu->iem.s.enmEffOpSize)
11329 {
11330 case IEMMODE_16BIT:
11331 IEM_MC_BEGIN(3, 3);
11332 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11333 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11334 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11335 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11337
11338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11339 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11340 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11341 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11342 IEM_MC_FETCH_EFLAGS(EFlags);
11343 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11344 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11345 else
11346 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11347
11348 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
11349 IEM_MC_COMMIT_EFLAGS(EFlags);
11350 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11351 IEM_MC_ADVANCE_RIP();
11352 IEM_MC_END();
11353 return VINF_SUCCESS;
11354
11355 case IEMMODE_32BIT:
11356 IEM_MC_BEGIN(3, 3);
11357 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11358 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11359 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11360 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11362
11363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11364 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11365 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11366 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11367 IEM_MC_FETCH_EFLAGS(EFlags);
11368 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11369 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11370 else
11371 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11372
11373 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
11374 IEM_MC_COMMIT_EFLAGS(EFlags);
11375 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11376 IEM_MC_ADVANCE_RIP();
11377 IEM_MC_END();
11378 return VINF_SUCCESS;
11379
11380 case IEMMODE_64BIT:
11381 IEM_MC_BEGIN(3, 3);
11382 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11383 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11384 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11385 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11387
11388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11389 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11390 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11391 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11392 IEM_MC_FETCH_EFLAGS(EFlags);
11393 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11394 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11395 else
11396 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11397
11398 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
11399 IEM_MC_COMMIT_EFLAGS(EFlags);
11400 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11401 IEM_MC_ADVANCE_RIP();
11402 IEM_MC_END();
11403 return VINF_SUCCESS;
11404
11405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11406 }
11407 }
11408}
11409
11410
11411/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11412FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11413{
11414 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11415
11416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11417 if (IEM_IS_MODRM_REG_MODE(bRm))
11418 {
11419 /*
11420 * Register, register.
11421 */
11422 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11424 IEM_MC_BEGIN(4, 2);
11425 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11426 IEM_MC_LOCAL(X86XMMREG, Dst);
11427 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11428 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11429 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11430 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11431 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11432 IEM_MC_PREPARE_SSE_USAGE();
11433 IEM_MC_REF_MXCSR(pfMxcsr);
11434 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11435 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11436 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11437 IEM_MC_IF_MXCSR_XCPT_PENDING()
11438 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11439 IEM_MC_ELSE()
11440 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11441 IEM_MC_ENDIF();
11442
11443 IEM_MC_ADVANCE_RIP();
11444 IEM_MC_END();
11445 }
11446 else
11447 {
11448 /*
11449 * Register, memory.
11450 */
11451 IEM_MC_BEGIN(4, 3);
11452 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11453 IEM_MC_LOCAL(X86XMMREG, Dst);
11454 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11455 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11456 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11458
11459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11460 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11461 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11463 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11464 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11465
11466 IEM_MC_PREPARE_SSE_USAGE();
11467 IEM_MC_REF_MXCSR(pfMxcsr);
11468 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11469 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11470 IEM_MC_IF_MXCSR_XCPT_PENDING()
11471 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11472 IEM_MC_ELSE()
11473 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11474 IEM_MC_ENDIF();
11475
11476 IEM_MC_ADVANCE_RIP();
11477 IEM_MC_END();
11478 }
11479 return VINF_SUCCESS;
11480}
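
/* Illustrative sketch, not the SSE worker (helper name made up): the low
   immediate bits select the comparison predicate, and each 32-bit lane
   becomes all ones when the predicate holds, all zeros otherwise.  A single
   lane modelled in plain C: */
#if 0
static uint32_t iemExampleCmpPsLane(float r32Src1, float r32Src2, uint8_t bImm)
{
    bool const fUnordered = r32Src1 != r32Src1 || r32Src2 != r32Src2; /* NaN? */
    bool fRes;
    switch (bImm & 7)
    {
        case 0:  fRes = r32Src1 == r32Src2;    break;   /* EQ    */
        case 1:  fRes = r32Src1 <  r32Src2;    break;   /* LT    */
        case 2:  fRes = r32Src1 <= r32Src2;    break;   /* LE    */
        case 3:  fRes = fUnordered;            break;   /* UNORD */
        case 4:  fRes = !(r32Src1 == r32Src2); break;   /* NEQ   */
        case 5:  fRes = !(r32Src1 <  r32Src2); break;   /* NLT   */
        case 6:  fRes = !(r32Src1 <= r32Src2); break;   /* NLE   */
        default: fRes = !fUnordered;           break;   /* ORD   */
    }
    return fRes ? UINT32_MAX : 0;
}
#endif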
11481
11482
11483/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11484FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11485{
11486 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11487
11488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11489 if (IEM_IS_MODRM_REG_MODE(bRm))
11490 {
11491 /*
11492 * Register, register.
11493 */
11494 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11496 IEM_MC_BEGIN(4, 2);
11497 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11498 IEM_MC_LOCAL(X86XMMREG, Dst);
11499 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11500 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11501 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11502 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11503 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11504 IEM_MC_PREPARE_SSE_USAGE();
11505 IEM_MC_REF_MXCSR(pfMxcsr);
11506 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11507 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11508 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11509 IEM_MC_IF_MXCSR_XCPT_PENDING()
11510 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11511 IEM_MC_ELSE()
11512 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11513 IEM_MC_ENDIF();
11514
11515 IEM_MC_ADVANCE_RIP();
11516 IEM_MC_END();
11517 }
11518 else
11519 {
11520 /*
11521 * Register, memory.
11522 */
11523 IEM_MC_BEGIN(4, 3);
11524 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11525 IEM_MC_LOCAL(X86XMMREG, Dst);
11526 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11527 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11528 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11530
11531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11532 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11533 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11535 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11536 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11537
11538 IEM_MC_PREPARE_SSE_USAGE();
11539 IEM_MC_REF_MXCSR(pfMxcsr);
11540 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11541 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11542 IEM_MC_IF_MXCSR_XCPT_PENDING()
11543 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11544 IEM_MC_ELSE()
11545 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11546 IEM_MC_ENDIF();
11547
11548 IEM_MC_ADVANCE_RIP();
11549 IEM_MC_END();
11550 }
11551 return VINF_SUCCESS;
11552}
11553
11554
11555/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11556FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11557{
11558 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11559
11560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11561 if (IEM_IS_MODRM_REG_MODE(bRm))
11562 {
11563 /*
11564 * Register, register.
11565 */
11566 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11568 IEM_MC_BEGIN(4, 2);
11569 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11570 IEM_MC_LOCAL(X86XMMREG, Dst);
11571 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11572 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11573 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11574 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11575 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11576 IEM_MC_PREPARE_SSE_USAGE();
11577 IEM_MC_REF_MXCSR(pfMxcsr);
11578 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11579 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11580 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11581 IEM_MC_IF_MXCSR_XCPT_PENDING()
11582 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11583 IEM_MC_ELSE()
11584 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11585 IEM_MC_ENDIF();
11586
11587 IEM_MC_ADVANCE_RIP();
11588 IEM_MC_END();
11589 }
11590 else
11591 {
11592 /*
11593 * Register, memory.
11594 */
11595 IEM_MC_BEGIN(4, 3);
11596 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11597 IEM_MC_LOCAL(X86XMMREG, Dst);
11598 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11599 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11600 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11602
11603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11604 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11605 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11607 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11608 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11609
11610 IEM_MC_PREPARE_SSE_USAGE();
11611 IEM_MC_REF_MXCSR(pfMxcsr);
11612 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11613 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11614 IEM_MC_IF_MXCSR_XCPT_PENDING()
11615 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11616 IEM_MC_ELSE()
11617 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11618 IEM_MC_ENDIF();
11619
11620 IEM_MC_ADVANCE_RIP();
11621 IEM_MC_END();
11622 }
11623 return VINF_SUCCESS;
11624}
11625
11626
11627/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11628FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11629{
11630 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11631
11632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11633 if (IEM_IS_MODRM_REG_MODE(bRm))
11634 {
11635 /*
11636 * Register, register.
11637 */
11638 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11640 IEM_MC_BEGIN(4, 2);
11641 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11642 IEM_MC_LOCAL(X86XMMREG, Dst);
11643 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11644 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11645 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11646 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11647 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11648 IEM_MC_PREPARE_SSE_USAGE();
11649 IEM_MC_REF_MXCSR(pfMxcsr);
11650 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11651 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11652 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11653 IEM_MC_IF_MXCSR_XCPT_PENDING()
11654 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11655 IEM_MC_ELSE()
11656 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11657 IEM_MC_ENDIF();
11658
11659 IEM_MC_ADVANCE_RIP();
11660 IEM_MC_END();
11661 }
11662 else
11663 {
11664 /*
11665 * Register, memory.
11666 */
11667 IEM_MC_BEGIN(4, 3);
11668 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11669 IEM_MC_LOCAL(X86XMMREG, Dst);
11670 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11671 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11672 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11674
11675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11676 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11677 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11679 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11680 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11681
11682 IEM_MC_PREPARE_SSE_USAGE();
11683 IEM_MC_REF_MXCSR(pfMxcsr);
11684 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11685 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11686 IEM_MC_IF_MXCSR_XCPT_PENDING()
11687 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11688 IEM_MC_ELSE()
11689 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11690 IEM_MC_ENDIF();
11691
11692 IEM_MC_ADVANCE_RIP();
11693 IEM_MC_END();
11694 }
11695 return VINF_SUCCESS;
11696}
11697
11698
11699/** Opcode 0x0f 0xc3. */
11700FNIEMOP_DEF(iemOp_movnti_My_Gy)
11701{
11702 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11703
11704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11705
11706 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11707 if (IEM_IS_MODRM_MEM_MODE(bRm))
11708 {
11709 switch (pVCpu->iem.s.enmEffOpSize)
11710 {
11711 case IEMMODE_32BIT:
11712 IEM_MC_BEGIN(0, 2);
11713 IEM_MC_LOCAL(uint32_t, u32Value);
11714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11715
11716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11718 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11719 return IEMOP_RAISE_INVALID_OPCODE();
11720
11721 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11722 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11723 IEM_MC_ADVANCE_RIP();
11724 IEM_MC_END();
11725 break;
11726
11727 case IEMMODE_64BIT:
11728 IEM_MC_BEGIN(0, 2);
11729 IEM_MC_LOCAL(uint64_t, u64Value);
11730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11731
11732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11734 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11735 return IEMOP_RAISE_INVALID_OPCODE();
11736
11737 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11738 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11739 IEM_MC_ADVANCE_RIP();
11740 IEM_MC_END();
11741 break;
11742
11743 case IEMMODE_16BIT:
11744 /** @todo check this form. */
11745 return IEMOP_RAISE_INVALID_OPCODE();
11746 }
11747 }
11748 else
11749 return IEMOP_RAISE_INVALID_OPCODE();
11750 return VINF_SUCCESS;
11751}
11752
11753
11754/* Opcode 0x66 0x0f 0xc3 - invalid */
11755/* Opcode 0xf3 0x0f 0xc3 - invalid */
11756/* Opcode 0xf2 0x0f 0xc3 - invalid */
11757
11758
11759/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11760FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11761{
11762 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11764 if (IEM_IS_MODRM_REG_MODE(bRm))
11765 {
11766 /*
11767 * Register, register.
11768 */
11769 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11771 IEM_MC_BEGIN(3, 0);
11772 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11773 IEM_MC_ARG(uint16_t, u16Src, 1);
11774 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11775 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11776 IEM_MC_PREPARE_FPU_USAGE();
11777 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11778 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11779 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11780 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11781 IEM_MC_FPU_TO_MMX_MODE();
11782 IEM_MC_ADVANCE_RIP();
11783 IEM_MC_END();
11784 }
11785 else
11786 {
11787 /*
11788 * Register, memory.
11789 */
11790 IEM_MC_BEGIN(3, 2);
11791 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11792 IEM_MC_ARG(uint16_t, u16Src, 1);
11793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11794
11795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11796 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11797 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11799 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11800 IEM_MC_PREPARE_FPU_USAGE();
11801
11802 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11803 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11804 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11805 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11806 IEM_MC_FPU_TO_MMX_MODE();
11807 IEM_MC_ADVANCE_RIP();
11808 IEM_MC_END();
11809 }
11810 return VINF_SUCCESS;
11811}
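
/* Illustrative sketch, not the assembly helper (name made up): the low two
   immediate bits pick which 16-bit lane of the 64-bit destination is
   replaced; all other lanes survive, hence the read-modify-write on the
   full MMX register above. */
#if 0
static void iemExamplePInsrW64(uint64_t *pu64Dst, uint16_t u16Src, uint8_t bImm)
{
    unsigned const iLane = bImm & 3;        /* four words in 64 bits */
    uint64_t const fMask = (uint64_t)UINT16_MAX << (iLane * 16);
    *pu64Dst = (*pu64Dst & ~fMask) | ((uint64_t)u16Src << (iLane * 16));
}
#endif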
11812
11813
11814/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11815FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11816{
11817 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11819 if (IEM_IS_MODRM_REG_MODE(bRm))
11820 {
11821 /*
11822 * Register, register.
11823 */
11824 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11826 IEM_MC_BEGIN(3, 0);
11827 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11828 IEM_MC_ARG(uint16_t, u16Src, 1);
11829 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11830 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11831 IEM_MC_PREPARE_SSE_USAGE();
11832 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11833 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11834 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11835 IEM_MC_ADVANCE_RIP();
11836 IEM_MC_END();
11837 }
11838 else
11839 {
11840 /*
11841 * Register, memory.
11842 */
11843 IEM_MC_BEGIN(3, 2);
11844 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11845 IEM_MC_ARG(uint16_t, u16Src, 1);
11846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11847
11848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11849 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11850 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11852 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11853 IEM_MC_PREPARE_SSE_USAGE();
11854
11855 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11856 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11857 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11858 IEM_MC_ADVANCE_RIP();
11859 IEM_MC_END();
11860 }
11861 return VINF_SUCCESS;
11862}
11863
11864
11865/* Opcode 0xf3 0x0f 0xc4 - invalid */
11866/* Opcode 0xf2 0x0f 0xc4 - invalid */
11867
11868
11869/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11870FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11871{
11872 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);*/ /** @todo */
11873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11874 if (IEM_IS_MODRM_REG_MODE(bRm))
11875 {
11876 /*
11877 * Register, register.
11878 */
11879 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11881 IEM_MC_BEGIN(3, 1);
11882 IEM_MC_LOCAL(uint16_t, u16Dst);
11883 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11884 IEM_MC_ARG(uint64_t, u64Src, 1);
11885 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11886 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11887 IEM_MC_PREPARE_FPU_USAGE();
11888 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11889 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bEvilArg);
11890 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11891 IEM_MC_FPU_TO_MMX_MODE();
11892 IEM_MC_ADVANCE_RIP();
11893 IEM_MC_END();
11894 return VINF_SUCCESS;
11895 }
11896
11897 /* No memory operand. */
11898 return IEMOP_RAISE_INVALID_OPCODE();
11899}
11900
11901
11902/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11903FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11904{
11905 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11907 if (IEM_IS_MODRM_REG_MODE(bRm))
11908 {
11909 /*
11910 * Register, register.
11911 */
11912 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11914 IEM_MC_BEGIN(3, 1);
11915 IEM_MC_LOCAL(uint16_t, u16Dst);
11916 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11917 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11918 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11919 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11920 IEM_MC_PREPARE_SSE_USAGE();
11921 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11922 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bEvilArg);
11923 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11924 IEM_MC_ADVANCE_RIP();
11925 IEM_MC_END();
11926 return VINF_SUCCESS;
11927 }
11928
11929 /* No memory operand. */
11930 return IEMOP_RAISE_INVALID_OPCODE();
11931}
11932
11933
11934/* Opcode 0xf3 0x0f 0xc5 - invalid */
11935/* Opcode 0xf2 0x0f 0xc5 - invalid */
11936
11937
11938/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
11939FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11940{
11941 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11943 if (IEM_IS_MODRM_REG_MODE(bRm))
11944 {
11945 /*
11946 * Register, register.
11947 */
11948 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11950 IEM_MC_BEGIN(3, 0);
11951 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11952 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11953 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11954 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11955 IEM_MC_PREPARE_SSE_USAGE();
11956 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11957 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11958 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11959 IEM_MC_ADVANCE_RIP();
11960 IEM_MC_END();
11961 }
11962 else
11963 {
11964 /*
11965 * Register, memory.
11966 */
11967 IEM_MC_BEGIN(3, 2);
11968 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11969 IEM_MC_LOCAL(RTUINT128U, uSrc);
11970 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11972
11973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11974 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11975 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11977 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11978 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11979
11980 IEM_MC_PREPARE_SSE_USAGE();
11981 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11982 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11983
11984 IEM_MC_ADVANCE_RIP();
11985 IEM_MC_END();
11986 }
11987 return VINF_SUCCESS;
11988}
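
/* Illustrative sketch, not the SSE worker (helper name made up): each
   two-bit immediate field selects a lane - the two low result lanes come
   from the destination operand, the two high ones from the source operand. */
#if 0
static void iemExampleShufPs(RTUINT128U *pDst, RTUINT128U const *pSrc, uint8_t bImm)
{
    RTUINT128U const uDstIn = *pDst;        /* read before overwriting */
    pDst->au32[0] = uDstIn.au32[ bImm       & 3];
    pDst->au32[1] = uDstIn.au32[(bImm >> 2) & 3];
    pDst->au32[2] = pSrc->au32[ (bImm >> 4) & 3];
    pDst->au32[3] = pSrc->au32[ (bImm >> 6) & 3];
}
#endif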
11989
11990
11991/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
11992FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
11993{
11994 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11996 if (IEM_IS_MODRM_REG_MODE(bRm))
11997 {
11998 /*
11999 * Register, register.
12000 */
12001 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
12002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12003 IEM_MC_BEGIN(3, 0);
12004 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12005 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12006 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
12007 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12008 IEM_MC_PREPARE_SSE_USAGE();
12009 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12010 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12011 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
12012 IEM_MC_ADVANCE_RIP();
12013 IEM_MC_END();
12014 }
12015 else
12016 {
12017 /*
12018 * Register, memory.
12019 */
12020 IEM_MC_BEGIN(3, 2);
12021 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12022 IEM_MC_LOCAL(RTUINT128U, uSrc);
12023 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12025
12026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12027 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
12028 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
12029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12030 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12031 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12032
12033 IEM_MC_PREPARE_SSE_USAGE();
12034 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12035 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
12036
12037 IEM_MC_ADVANCE_RIP();
12038 IEM_MC_END();
12039 }
12040 return VINF_SUCCESS;
12041}
12042
12043
12044/* Opcode 0xf3 0x0f 0xc6 - invalid */
12045/* Opcode 0xf2 0x0f 0xc6 - invalid */
12046
12047
12048/** Opcode 0x0f 0xc7 !11/1. */
12049FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12050{
12051 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12052
12053 IEM_MC_BEGIN(4, 3);
12054 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12055 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12056 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12057 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12058 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12059 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12061
12062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12063 IEMOP_HLP_DONE_DECODING();
12064 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12065
12066 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12067 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12068 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12069
12070 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12071 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12072 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12073
12074 IEM_MC_FETCH_EFLAGS(EFlags);
12075 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12076 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12077 else
12078 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12079
12080 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
12081 IEM_MC_COMMIT_EFLAGS(EFlags);
12082 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12083 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
12084 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12085 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12086 IEM_MC_ENDIF();
12087 IEM_MC_ADVANCE_RIP();
12088
12089 IEM_MC_END();
12090 return VINF_SUCCESS;
12091}
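
/* Illustrative sketch, non-atomic unlike the real worker (helper name made
   up): CMPXCHG8B compares EDX:EAX against the memory quadword; on a match
   it stores ECX:EBX and sets ZF, otherwise it loads the memory value into
   EDX:EAX and clears ZF. */
#if 0
static void iemExampleCmpXchg8b(uint64_t *pu64Mem, RTUINT64U *pu64EaxEdx,
                                RTUINT64U const *pu64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64Mem == pu64EaxEdx->u)
    {
        *pu64Mem   = pu64EbxEcx->u;
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u = *pu64Mem;
        *pfEFlags    &= ~(uint32_t)X86_EFL_ZF;
    }
}
#endif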
12092
12093
12094/** Opcode REX.W 0x0f 0xc7 !11/1. */
12095FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12096{
12097 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12098 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12099 {
12100#if 0
12101 RT_NOREF(bRm);
12102 IEMOP_BITCH_ABOUT_STUB();
12103 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
12104#else
12105 IEM_MC_BEGIN(4, 3);
12106 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12107 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12108 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12109 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12110 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12111 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12113
12114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12115 IEMOP_HLP_DONE_DECODING();
12116 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12117 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12118
12119 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12120 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12121 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12122
12123 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12124 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12125 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12126
12127 IEM_MC_FETCH_EFLAGS(EFlags);
12128# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
12129# if defined(RT_ARCH_AMD64)
12130 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12131# endif
12132 {
12133 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12134 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12135 else
12136 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12137 }
12138# if defined(RT_ARCH_AMD64)
12139 else
12140# endif
12141# endif
12142# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
12143 {
12144 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12145 accesses that are not at all atomic, which works fine in a uni-CPU guest
12146 configuration (ignoring DMA). If guest SMP is active we have no choice
12147 but to use a rendezvous callback here. Sigh. */
12148 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12149 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12150 else
12151 {
12152 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12153 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12154 }
12155 }
12156# endif
12157
12158 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12159 IEM_MC_COMMIT_EFLAGS(EFlags);
12160 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12161 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12162 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12163 IEM_MC_ENDIF();
12164 IEM_MC_ADVANCE_RIP();
12165
12166 IEM_MC_END();
12167 return VINF_SUCCESS;
12168#endif
12169 }
12170 Log(("cmpxchg16b -> #UD\n"));
12171 return IEMOP_RAISE_INVALID_OPCODE();
12172}
12173
12174FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12175{
12176 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12177 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12178 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12179}
12180
12181/** Opcode 0x0f 0xc7 11/6. */
12182FNIEMOP_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
12183
12184/** Opcode 0x0f 0xc7 !11/6. */
12185#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12186FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12187{
12188 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12189 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12190 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12191 IEM_MC_BEGIN(2, 0);
12192 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12193 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12195 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12196 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12197 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12198 IEM_MC_END();
12199 return VINF_SUCCESS;
12200}
12201#else
12202FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12203#endif
12204
12205/** Opcode 0x66 0x0f 0xc7 !11/6. */
12206#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12207FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12208{
12209 IEMOP_MNEMONIC(vmclear, "vmclear");
12210 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12211 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12212 IEM_MC_BEGIN(2, 0);
12213 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12214 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12216 IEMOP_HLP_DONE_DECODING();
12217 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12218 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12219 IEM_MC_END();
12220 return VINF_SUCCESS;
12221}
12222#else
12223FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12224#endif
12225
12226/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12227#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12228FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12229{
12230 IEMOP_MNEMONIC(vmxon, "vmxon");
12231 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12232 IEM_MC_BEGIN(2, 0);
12233 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12234 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12236 IEMOP_HLP_DONE_DECODING();
12237 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12238 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12239 IEM_MC_END();
12240 return VINF_SUCCESS;
12241}
12242#else
12243FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12244#endif
12245
12246/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12247#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12248FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12249{
12250 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12251 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12252 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12253 IEM_MC_BEGIN(2, 0);
12254 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12255 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12257 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12258 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12259 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12260 IEM_MC_END();
12261 return VINF_SUCCESS;
12262}
12263#else
12264FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12265#endif
12266
12267/** Opcode 0x0f 0xc7 11/7. */
12268FNIEMOP_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
12269
12270
12271/**
12272 * Group 9 jump table for register variant.
12273 */
12274IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12275{ /* pfx: none, 066h, 0f3h, 0f2h */
12276 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12277 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12278 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12279 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12280 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12281 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12282 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12283 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12284};
12285AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12286
12287
12288/**
12289 * Group 9 jump table for memory variant.
12290 */
12291IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12292{ /* pfx: none, 066h, 0f3h, 0f2h */
12293 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12294 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12295 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12296 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12297 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12298 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12299 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12300 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12301};
12302AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12303
12304
12305/** Opcode 0x0f 0xc7. */
12306FNIEMOP_DEF(iemOp_Grp9)
12307{
12308 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12309 if (IEM_IS_MODRM_REG_MODE(bRm))
12310 /* register, register */
12311 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12312 + pVCpu->iem.s.idxPrefix], bRm);
12313 /* memory, register */
12314 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12315 + pVCpu->iem.s.idxPrefix], bRm);
12316}
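
/* Illustrative sketch (helper name made up): the two tables above are
   indexed by /reg times four plus the prefix index (none, 066h, 0f3h,
   0f2h), mirroring what iemOp_Grp9 computes inline: */
#if 0
static PFNIEMOPRM iemExampleGrp9Lookup(uint8_t bRm, uint8_t idxPrefix, bool fRegMode)
{
    unsigned const idx = IEM_GET_MODRM_REG_8(bRm) * 4 + idxPrefix;
    return fRegMode ? g_apfnGroup9RegReg[idx] : g_apfnGroup9MemReg[idx];
}
#endif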
12317
12318
12319/**
12320 * Common 'bswap register' helper.
12321 */
12322FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12323{
12324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12325 switch (pVCpu->iem.s.enmEffOpSize)
12326 {
12327 case IEMMODE_16BIT:
12328 IEM_MC_BEGIN(1, 0);
12329 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12330 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12331 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12332 IEM_MC_ADVANCE_RIP();
12333 IEM_MC_END();
12334 return VINF_SUCCESS;
12335
12336 case IEMMODE_32BIT:
12337 IEM_MC_BEGIN(1, 0);
12338 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12339 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12340 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12341 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12342 IEM_MC_ADVANCE_RIP();
12343 IEM_MC_END();
12344 return VINF_SUCCESS;
12345
12346 case IEMMODE_64BIT:
12347 IEM_MC_BEGIN(1, 0);
12348 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12349 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12350 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12351 IEM_MC_ADVANCE_RIP();
12352 IEM_MC_END();
12353 return VINF_SUCCESS;
12354
12355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12356 }
12357}
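
/* Illustrative sketch, not the assembly helper (name made up): BSWAP simply
   reverses the byte order of a 32-bit (or 64-bit) register.  The 16-bit
   form is documented as undefined on real hardware, which is presumably why
   it gets a dedicated worker above instead of reusing the 32-bit one. */
#if 0
static uint32_t iemExampleBSwap32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif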
12358
12359
12360/** Opcode 0x0f 0xc8. */
12361FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12362{
12363 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12364 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12365 prefix, but REX.B appears to be the correct prefix. For a parallel
12366 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12367 IEMOP_HLP_MIN_486();
12368 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12369}
12370
12371
12372/** Opcode 0x0f 0xc9. */
12373FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12374{
12375 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12376 IEMOP_HLP_MIN_486();
12377 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12378}
12379
12380
12381/** Opcode 0x0f 0xca. */
12382FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12383{
12384 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12385 IEMOP_HLP_MIN_486();
12386 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12387}
12388
12389
12390/** Opcode 0x0f 0xcb. */
12391FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12392{
12393 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12394 IEMOP_HLP_MIN_486();
12395 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12396}
12397
12398
12399/** Opcode 0x0f 0xcc. */
12400FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12401{
12402 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12403 IEMOP_HLP_MIN_486();
12404 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12405}
12406
12407
12408/** Opcode 0x0f 0xcd. */
12409FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12410{
12411 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12412 IEMOP_HLP_MIN_486();
12413 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12414}
12415
12416
12417/** Opcode 0x0f 0xce. */
12418FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12419{
12420 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12421 IEMOP_HLP_MIN_486();
12422 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12423}
12424
12425
12426/** Opcode 0x0f 0xcf. */
12427FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12428{
12429 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12430 IEMOP_HLP_MIN_486();
12431 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12432}
12433
12434
12435/* Opcode 0x0f 0xd0 - invalid */
12436
12437
12438/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12439FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12440{
12441 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
12442 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12443}
12444
12445
12446/* Opcode 0xf3 0x0f 0xd0 - invalid */
12447
12448
12449/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12450FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12451{
12452 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
12453 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12454}
12455
12456
12457
12458/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12459FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12460{
12461 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12462 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12463}
12464
12465/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12466FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12467{
12468 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12469 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12470}
12471
12472/* Opcode 0xf3 0x0f 0xd1 - invalid */
12473/* Opcode 0xf2 0x0f 0xd1 - invalid */
12474
12475/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12476FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12477{
12478 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12479 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12480}
12481
12482
12483/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12484FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12485{
12486 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12487 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12488}
12489
12490
12491/* Opcode 0xf3 0x0f 0xd2 - invalid */
12492/* Opcode 0xf2 0x0f 0xd2 - invalid */
12493
12494/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12495FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12496{
12497 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12498 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12499}
12500
12501
12502/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12503FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12504{
12505 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12506 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12507}
12508
12509
12510/* Opcode 0xf3 0x0f 0xd3 - invalid */
12511/* Opcode 0xf2 0x0f 0xd3 - invalid */
12512
12513
12514/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12515FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12516{
12517 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12518 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12519}
12520
12521
12522/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12523FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12524{
12525 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12526 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12527}
12528
12529
12530/* Opcode 0xf3 0x0f 0xd4 - invalid */
12531/* Opcode 0xf2 0x0f 0xd4 - invalid */
12532
12533/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12534FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12535{
12536 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12537 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12538}
12539
12540/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12541FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12542{
12543 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12544 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12545}
12546
12547
12548/* Opcode 0xf3 0x0f 0xd5 - invalid */
12549/* Opcode 0xf2 0x0f 0xd5 - invalid */
12550
12551/* Opcode 0x0f 0xd6 - invalid */
12552
12553/**
12554 * @opcode 0xd6
12555 * @oppfx 0x66
12556 * @opcpuid sse2
12557 * @opgroup og_sse2_pcksclr_datamove
12558 * @opxcpttype none
12559 * @optest op1=-1 op2=2 -> op1=2
12560 * @optest op1=0 op2=-42 -> op1=-42
12561 */
12562FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12563{
12564 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12566 if (IEM_IS_MODRM_REG_MODE(bRm))
12567 {
12568 /*
12569 * Register, register.
12570 */
12571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12572 IEM_MC_BEGIN(0, 2);
12573 IEM_MC_LOCAL(uint64_t, uSrc);
12574
12575 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12576 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12577
12578 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12579 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12580
12581 IEM_MC_ADVANCE_RIP();
12582 IEM_MC_END();
12583 }
12584 else
12585 {
12586 /*
12587 * Memory, register.
12588 */
12589 IEM_MC_BEGIN(0, 2);
12590 IEM_MC_LOCAL(uint64_t, uSrc);
12591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12592
12593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12595 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12596 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12597
12598 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12599 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12600
12601 IEM_MC_ADVANCE_RIP();
12602 IEM_MC_END();
12603 }
12604 return VINF_SUCCESS;
12605}
12606
12607
12608/**
12609 * @opcode 0xd6
12610 * @opcodesub 11 mr/reg
12611 * @oppfx f3
12612 * @opcpuid sse2
12613 * @opgroup og_sse2_simdint_datamove
12614 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12615 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12616 */
12617FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12618{
12619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12620 if (IEM_IS_MODRM_REG_MODE(bRm))
12621 {
12622 /*
12623 * Register, register.
12624 */
12625 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12627 IEM_MC_BEGIN(0, 1);
12628 IEM_MC_LOCAL(uint64_t, uSrc);
12629
12630 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12631 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12632 IEM_MC_FPU_TO_MMX_MODE();
12633
12634 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12635 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12636
12637 IEM_MC_ADVANCE_RIP();
12638 IEM_MC_END();
12639 return VINF_SUCCESS;
12640 }
12641
12642 /**
12643 * @opdone
12644 * @opmnemonic udf30fd6mem
12645 * @opcode 0xd6
12646 * @opcodesub !11 mr/reg
12647 * @oppfx f3
12648 * @opunused intel-modrm
12649 * @opcpuid sse
12650 * @optest ->
12651 */
12652 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12653}
12654
12655
12656/**
12657 * @opcode 0xd6
12658 * @opcodesub 11 mr/reg
12659 * @oppfx f2
12660 * @opcpuid sse2
12661 * @opgroup og_sse2_simdint_datamove
12662 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12663 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12664 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12665 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12666 * @optest op1=-42 op2=0xfedcba9876543210
12667 * -> op1=0xfedcba9876543210 ftw=0xff
12668 */
12669FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12670{
12671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12672 if (IEM_IS_MODRM_REG_MODE(bRm))
12673 {
12674 /*
12675 * Register, register.
12676 */
12677 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12679 IEM_MC_BEGIN(0, 1);
12680 IEM_MC_LOCAL(uint64_t, uSrc);
12681
12682 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12683 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12684 IEM_MC_FPU_TO_MMX_MODE();
12685
12686 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12687 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12688
12689 IEM_MC_ADVANCE_RIP();
12690 IEM_MC_END();
12691 return VINF_SUCCESS;
12692 }
12693
12694 /**
12695 * @opdone
12696 * @opmnemonic udf20fd6mem
12697 * @opcode 0xd6
12698 * @opcodesub !11 mr/reg
12699 * @oppfx f2
12700 * @opunused intel-modrm
12701 * @opcpuid sse
12702 * @optest ->
12703 */
12704 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12705}
12706
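/*
 * Editorial note: the ftw=0xff expectations in the @optest annotations above
 * follow from the x87/MMX transition rule applied by IEM_MC_FPU_TO_MMX_MODE:
 * executing an MMX instruction tags all eight x87 registers as valid and
 * resets TOP.  A hedged sketch of that rule in terms of the FXSAVE image
 * (X86FXSTATE keeps the abridged tag word in FTW); illustration only.
 */
#if 0 /* illustration only */
static void iemSketchFpuToMmxMode(PX86FXSTATE pFpuCtx)
{
    pFpuCtx->FSW &= ~X86_FSW_TOP_MASK; /* TOP = 0 */
    pFpuCtx->FTW  = 0xff;              /* all registers tagged valid */
}
#endif
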
12707
12708/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12709FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12710{
12711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12712 /* Docs say register only. */
12713 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12714 {
12715 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
12716 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
12717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12718 IEM_MC_BEGIN(2, 0);
12719 IEM_MC_ARG(uint64_t *, puDst, 0);
12720 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12721 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12722 IEM_MC_PREPARE_FPU_USAGE();
12723 IEM_MC_FPU_TO_MMX_MODE();
12724
12725 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
12726 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12727 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12728
12729 IEM_MC_ADVANCE_RIP();
12730 IEM_MC_END();
12731 return VINF_SUCCESS;
12732 }
12733 return IEMOP_RAISE_INVALID_OPCODE();
12734}
12735
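/*
 * Editorial note: a minimal C sketch of what the pmovmskb workers compute -
 * the most significant bit of each source byte gathered into the low bits
 * of the destination, everything above zeroed (8 mask bits for the MMX form
 * above, 16 for the SSE form below).  Illustration only.
 */
#if 0 /* illustration only */
static void iemSketchPmovmskbU64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t const uSrc  = *puSrc;
    uint64_t       fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte; /* byte sign bit */
    *puDst = fMask; /* bits 63:8 are zero */
}
#endif
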
12736
12737/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12738FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12739{
12740 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12741 /* Docs say register only. */
12742 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12743 {
12744 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
12745 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
12746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12747 IEM_MC_BEGIN(2, 0);
12748 IEM_MC_ARG(uint64_t *, puDst, 0);
12749 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12750 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12751 IEM_MC_PREPARE_SSE_USAGE();
12752 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12753 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12754 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12755 IEM_MC_ADVANCE_RIP();
12756 IEM_MC_END();
12757 return VINF_SUCCESS;
12758 }
12759 return IEMOP_RAISE_INVALID_OPCODE();
12760}
12761
12762
12763/* Opcode 0xf3 0x0f 0xd7 - invalid */
12764/* Opcode 0xf2 0x0f 0xd7 - invalid */
12765
12766
12767/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12768FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12769{
12770 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12771 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12772}
12773
12774
12775/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12776FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12777{
12778 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12779 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12780}
12781
12782
12783/* Opcode 0xf3 0x0f 0xd8 - invalid */
12784/* Opcode 0xf2 0x0f 0xd8 - invalid */
12785
12786/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12787FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12788{
12789 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12790 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12791}
12792
12793
12794/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12795FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12796{
12797 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12798 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12799}
12800
12801
12802/* Opcode 0xf3 0x0f 0xd9 - invalid */
12803/* Opcode 0xf2 0x0f 0xd9 - invalid */
12804
12805/** Opcode 0x0f 0xda - pminub Pq, Qq */
12806FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12807{
12808 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12809 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12810}
12811
12812
12813/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12814FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12815{
12816 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12817 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12818}
12819
12820/* Opcode 0xf3 0x0f 0xda - invalid */
12821/* Opcode 0xf2 0x0f 0xda - invalid */
12822
12823/** Opcode 0x0f 0xdb - pand Pq, Qq */
12824FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12825{
12826 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12827 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12828}
12829
12830
12831/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12832FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12833{
12834 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12835 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12836}
12837
12838
12839/* Opcode 0xf3 0x0f 0xdb - invalid */
12840/* Opcode 0xf2 0x0f 0xdb - invalid */
12841
12842/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12843FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12844{
12845 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12846 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12847}
12848
12849
12850/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12851FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12852{
12853 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12854 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12855}
12856
12857
12858/* Opcode 0xf3 0x0f 0xdc - invalid */
12859/* Opcode 0xf2 0x0f 0xdc - invalid */
12860
12861/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12862FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12863{
12864 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12865 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12866}
12867
12868
12869/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
12870FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
12871{
12872 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12873 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
12874}
12875
12876
12877/* Opcode 0xf3 0x0f 0xdd - invalid */
12878/* Opcode 0xf2 0x0f 0xdd - invalid */
12879
12880/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
12881FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
12882{
12883 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12884 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
12885}
12886
12887
12888/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
12889FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
12890{
12891 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12892 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
12893}
12894
12895/* Opcode 0xf3 0x0f 0xde - invalid */
12896/* Opcode 0xf2 0x0f 0xde - invalid */
12897
12898
12899/** Opcode 0x0f 0xdf - pandn Pq, Qq */
12900FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
12901{
12902 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12903 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
12904}
12905
12906
12907/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
12908FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
12909{
12910 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12911 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
12912}
12913
12914
12915/* Opcode 0xf3 0x0f 0xdf - invalid */
12916/* Opcode 0xf2 0x0f 0xdf - invalid */
12917
12918/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
12919FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
12920{
12921 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12922 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
12923}
12924
12925
12926/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
12927FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
12928{
12929 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12930 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
12931}
12932
12933
12934/* Opcode 0xf3 0x0f 0xe0 - invalid */
12935/* Opcode 0xf2 0x0f 0xe0 - invalid */
12936
12937/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
12938FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
12939{
12940 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12941 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
12942}
12943
12944
12945/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
12946FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
12947{
12948 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12949 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
12950}
12951
12952
12953/* Opcode 0xf3 0x0f 0xe1 - invalid */
12954/* Opcode 0xf2 0x0f 0xe1 - invalid */
12955
12956/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
12957FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
12958{
12959 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12960 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
12961}
12962
12963
12964/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
12965FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
12966{
12967 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12968 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
12969}
12970
12971
12972/* Opcode 0xf3 0x0f 0xe2 - invalid */
12973/* Opcode 0xf2 0x0f 0xe2 - invalid */
12974
12975/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
12976FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
12977{
12978 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12979 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
12980}
12981
12982
12983/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
12984FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
12985{
12986 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12987 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
12988}
12989
12990
12991/* Opcode 0xf3 0x0f 0xe3 - invalid */
12992/* Opcode 0xf2 0x0f 0xe3 - invalid */
12993
12994/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
12995FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
12996{
12997 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12998 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
12999}
13000
13001
13002/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13003FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13004{
13005 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13006 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13007}
13008
13009
13010/* Opcode 0xf3 0x0f 0xe4 - invalid */
13011/* Opcode 0xf2 0x0f 0xe4 - invalid */
13012
13013/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13014FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13015{
13016 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13017 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13018}
13019
13020
13021/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13022FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13023{
13024 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13025 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13026}
13027
13028
13029/* Opcode 0xf3 0x0f 0xe5 - invalid */
13030/* Opcode 0xf2 0x0f 0xe5 - invalid */
13031/* Opcode 0x0f 0xe6 - invalid */
13032
13033
13034/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13035FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13036{
13037 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13038 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13039}
13040
13041
13042/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13043FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13044{
13045 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13046 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13047}
13048
13049
13050/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13051FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13052{
13053 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13054 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13055}
13056
13057
13058/**
13059 * @opcode 0xe7
13060 * @opcodesub !11 mr/reg
13061 * @oppfx none
13062 * @opcpuid sse
13063 * @opgroup og_sse1_cachect
13064 * @opxcpttype none
13065 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13066 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13067 */
13068FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13069{
13070 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13071 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13072 if (IEM_IS_MODRM_MEM_MODE(bRm))
13073 {
13074 /* Register, memory. */
13075 IEM_MC_BEGIN(0, 2);
13076 IEM_MC_LOCAL(uint64_t, uSrc);
13077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13078
13079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13081 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13082 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13083 IEM_MC_FPU_TO_MMX_MODE();
13084
13085 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13086 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13087
13088 IEM_MC_ADVANCE_RIP();
13089 IEM_MC_END();
13090 return VINF_SUCCESS;
13091 }
13092 /**
13093 * @opdone
13094 * @opmnemonic ud0fe7reg
13095 * @opcode 0xe7
13096 * @opcodesub 11 mr/reg
13097 * @oppfx none
13098 * @opunused immediate
13099 * @opcpuid sse
13100 * @optest ->
13101 */
13102 return IEMOP_RAISE_INVALID_OPCODE();
13103}
13104
13105/**
13106 * @opcode 0xe7
13107 * @opcodesub !11 mr/reg
13108 * @oppfx 0x66
13109 * @opcpuid sse2
13110 * @opgroup og_sse2_cachect
13111 * @opxcpttype 1
13112 * @optest op1=-1 op2=2 -> op1=2
13113 * @optest op1=0 op2=-42 -> op1=-42
13114 */
13115FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13116{
13117 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13119 if (IEM_IS_MODRM_MEM_MODE(bRm))
13120 {
13121 /* Register, memory. */
13122 IEM_MC_BEGIN(0, 2);
13123 IEM_MC_LOCAL(RTUINT128U, uSrc);
13124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13125
13126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13128 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
13129 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13130
13131 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13132 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13133
13134 IEM_MC_ADVANCE_RIP();
13135 IEM_MC_END();
13136 return VINF_SUCCESS;
13137 }
13138
13139 /**
13140 * @opdone
13141 * @opmnemonic ud660fe7reg
13142 * @opcode 0xe7
13143 * @opcodesub 11 mr/reg
13144 * @oppfx 0x66
13145 * @opunused immediate
13146 * @opcpuid sse
13147 * @optest ->
13148 */
13149 return IEMOP_RAISE_INVALID_OPCODE();
13150}
13151
13152/* Opcode 0xf3 0x0f 0xe7 - invalid */
13153/* Opcode 0xf2 0x0f 0xe7 - invalid */
13154
13155
13156/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13157FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13158{
13159 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13160 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13161}
13162
13163
13164/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13165FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13166{
13167 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13168 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13169}
13170
13171
13172/* Opcode 0xf3 0x0f 0xe8 - invalid */
13173/* Opcode 0xf2 0x0f 0xe8 - invalid */
13174
13175/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13176FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13177{
13178 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13179 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13180}
13181
13182
13183/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13184FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13185{
13186 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13187 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13188}
13189
13190
13191/* Opcode 0xf3 0x0f 0xe9 - invalid */
13192/* Opcode 0xf2 0x0f 0xe9 - invalid */
13193
13194
13195/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13196FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13197{
13198 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13199 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13200}
13201
13202
13203/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13204FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13205{
13206 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13207 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13208}
13209
13210
13211/* Opcode 0xf3 0x0f 0xea - invalid */
13212/* Opcode 0xf2 0x0f 0xea - invalid */
13213
13214
13215/** Opcode 0x0f 0xeb - por Pq, Qq */
13216FNIEMOP_DEF(iemOp_por_Pq_Qq)
13217{
13218 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13219 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13220}
13221
13222
13223/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13224FNIEMOP_DEF(iemOp_por_Vx_Wx)
13225{
13226 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13227 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13228}
13229
13230
13231/* Opcode 0xf3 0x0f 0xeb - invalid */
13232/* Opcode 0xf2 0x0f 0xeb - invalid */
13233
13234/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13235FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13236{
13237 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13238 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13239}
13240
13241
13242/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13243FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13244{
13245 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13246 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13247}
13248
13249
13250/* Opcode 0xf3 0x0f 0xec - invalid */
13251/* Opcode 0xf2 0x0f 0xec - invalid */
13252
13253/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13254FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13255{
13256 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13257 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13258}
13259
13260
13261/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13262FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13263{
13264 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13265 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13266}
13267
13268
13269/* Opcode 0xf3 0x0f 0xed - invalid */
13270/* Opcode 0xf2 0x0f 0xed - invalid */
13271
13272
13273/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13274FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13275{
13276 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13277 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13278}
13279
13280
13281/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13282FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13283{
13284 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13285 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13286}
13287
13288
13289/* Opcode 0xf3 0x0f 0xee - invalid */
13290/* Opcode 0xf2 0x0f 0xee - invalid */
13291
13292
13293/** Opcode 0x0f 0xef - pxor Pq, Qq */
13294FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13295{
13296 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13297 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13298}
13299
13300
13301/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13302FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13303{
13304 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13305 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13306}
13307
13308
13309/* Opcode 0xf3 0x0f 0xef - invalid */
13310/* Opcode 0xf2 0x0f 0xef - invalid */
13311
13312/* Opcode 0x0f 0xf0 - invalid */
13313/* Opcode 0x66 0x0f 0xf0 - invalid */
13314
13315
13316/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13317FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13318{
13319 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13321 if (IEM_IS_MODRM_REG_MODE(bRm))
13322 {
13323 /*
13324 * Register, register - (not implemented, assuming it raises \#UD).
13325 */
13326 return IEMOP_RAISE_INVALID_OPCODE();
13327 }
13328 else
13329 {
13330 /*
13331 * Register, memory.
13332 */
13333 IEM_MC_BEGIN(0, 2);
13334 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13336
13337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13339 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
13340 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13341 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13342 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13343
13344 IEM_MC_ADVANCE_RIP();
13345 IEM_MC_END();
13346 }
13347 return VINF_SUCCESS;
13348}
13349
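/*
 * Editorial note: lddqu is the deliberately unaligned 128-bit load, hence
 * the plain IEM_MC_FETCH_MEM_U128 above rather than an aligning variant, so
 * unlike movdqa a misaligned operand does not raise \#GP here.
 */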
13350
13351/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13352FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13353{
13354 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13355 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13356}
13357
13358
13359/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13360FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13361{
13362 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13363 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13364}
13365
13366
13367/* Opcode 0xf2 0x0f 0xf1 - invalid */
13368
13369/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13370FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13371{
13372 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13373 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13374}
13375
13376
13377/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13378FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13379{
13380 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13381 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13382}
13383
13384
13385/* Opcode 0xf2 0x0f 0xf2 - invalid */
13386
13387/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13388FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13389{
13390 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13391 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13392}
13393
13394
13395/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13396FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13397{
13398 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13399 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13400}
13401
13402/* Opcode 0xf2 0x0f 0xf3 - invalid */
13403
13404/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13405FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13406{
13407 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13408 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pmuludq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13409}
13410
13411
13412/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13413FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13414{
13415 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13416 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13417}
13418
13419
13420/* Opcode 0xf2 0x0f 0xf4 - invalid */
13421
13422/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13423FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13424{
13425 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13426 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13427}
13428
13429
13430/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13431FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13432{
13433 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13434 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13435}
13436
13437/* Opcode 0xf2 0x0f 0xf5 - invalid */
13438
13439/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13440FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13441{
13442 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13443 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13444}
13445
13446
13447/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13448FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13449{
13450 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13451 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13452}
13453
13454
13455/* Opcode 0xf2 0x0f 0xf6 - invalid */
13456
13457/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13458FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13459/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13460FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13461/* Opcode 0xf2 0x0f 0xf7 - invalid */
13462
13463
13464/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13465FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13466{
13467 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13468 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13469}
13470
13471
13472/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13473FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13474{
13475 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13476 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13477}
13478
13479
13480/* Opcode 0xf2 0x0f 0xf8 - invalid */
13481
13482
13483/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13484FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13485{
13486 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13487 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13488}
13489
13490
13491/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13492FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13493{
13494 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13495 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13496}
13497
13498
13499/* Opcode 0xf2 0x0f 0xf9 - invalid */
13500
13501
13502/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13503FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13504{
13505 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13506 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13507}
13508
13509
13510/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13511FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13512{
13513 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13514 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13515}
13516
13517
13518/* Opcode 0xf2 0x0f 0xfa - invalid */
13519
13520
13521/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13522FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13523{
13524 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13525 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13526}
13527
13528
13529/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13530FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13531{
13532 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13533 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13534}
13535
13536
13537/* Opcode 0xf2 0x0f 0xfb - invalid */
13538
13539
13540/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13541FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13542{
13543 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13544 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13545}
13546
13547
13548/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13549FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13550{
13551 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13552 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13553}
13554
13555
13556/* Opcode 0xf2 0x0f 0xfc - invalid */
13557
13558
13559/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13560FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13561{
13562 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13563 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13564}
13565
13566
13567/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13568FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13569{
13570 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13571 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13572}
13573
13574
13575/* Opcode 0xf2 0x0f 0xfd - invalid */
13576
13577
13578/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13579FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13580{
13581 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13582 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13583}
13584
13585
13586/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
13587FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13588{
13589 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13590 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13591}
13592
13593
13594/* Opcode 0xf2 0x0f 0xfe - invalid */
13595
13596
13597/** Opcode **** 0x0f 0xff - UD0 */
13598FNIEMOP_DEF(iemOp_ud0)
13599{
13600 IEMOP_MNEMONIC(ud0, "ud0");
13601 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13602 {
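        /* Intel CPUs fetch and decode the ModRM byte for UD0, so a memory
           operand's effective address is calculated (and may fault) before
           the \#UD is raised; the other path below raises \#UD without
           consuming a ModRM byte. */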
13603 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13604#ifndef TST_IEM_CHECK_MC
13605 if (IEM_IS_MODRM_MEM_MODE(bRm))
13606 {
13607 RTGCPTR GCPtrEff;
13608 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13609 if (rcStrict != VINF_SUCCESS)
13610 return rcStrict;
13611 }
13612#endif
13613 IEMOP_HLP_DONE_DECODING();
13614 }
13615 return IEMOP_RAISE_INVALID_OPCODE();
13616}
13617
13618
13619
13620/**
13621 * Two byte opcode map, first byte 0x0f.
13622 *
13623 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13624 * check if it needs updating as well when making changes.
13625 */
13626IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13627{
13628 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
13629 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13630 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13631 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13632 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13633 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13634 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13635 /* 0x06 */ IEMOP_X4(iemOp_clts),
13636 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13637 /* 0x08 */ IEMOP_X4(iemOp_invd),
13638 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13639 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13640 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13641 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13642 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13643 /* 0x0e */ IEMOP_X4(iemOp_femms),
13644 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13645
13646 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13647 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13648 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13649 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13650 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13651 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13652 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13653 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13654 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13655 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13656 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13657 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13658 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13659 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13660 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13661 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13662
13663 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13664 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13665 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13666 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13667 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13668 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13669 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13670 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13671 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13672 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13673 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13674 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13675 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13676 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13677 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13678 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13679
13680 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13681 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13682 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13683 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13684 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13685 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13686 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13687 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13688 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13689 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13690 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13691 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13692 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13693 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13694 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13695 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13696
13697 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13698 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13699 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13700 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13701 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13702 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13703 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13704 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13705 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13706 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13707 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13708 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13709 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13710 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13711 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13712 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13713
13714 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13715 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13716 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13717 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13718 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13719 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13720 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13721 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13722 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13723 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13724 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13725 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13726 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13727 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13728 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13729 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13730
13731 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13732 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13733 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13734 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13735 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13736 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13737 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13738 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13739 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13740 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13741 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13742 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13743 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13744 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13745 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13746 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13747
13748 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13749 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13750 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13751 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13752 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13753 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13754 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13755 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13756
13757 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13758 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13759 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13760 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13761 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13762 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13763 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13764 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13765
13766 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13767 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
13768 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
13769 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
13770 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
13771 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
13772 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
13773 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
13774 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
13775 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
13776 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
13777 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
13778 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
13779 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
13780 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
13781 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
13782
13783 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
13784 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
13785 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
13786 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
13787 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
13788 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
13789 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
13790 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
13791 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
13792 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
13793 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
13794 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
13795 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
13796 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
13797 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
13798 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
13799
13800 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
13801 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
13802 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
13803 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
13804 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
13805 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
13806 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
13807 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
13808 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
13809 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
13810 /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
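
/*
 * Illustrative sketch, not part of the original source: the map holds
 * 256 opcodes x 4 entries, one per mandatory-prefix column (none, 0x66,
 * 0xF3, 0xF2), which is why the AssertCompile above checks for 1024
 * elements. IEMOP_X4(a) repeats a single worker across all four columns
 * for opcodes whose behaviour does not depend on the prefix. A decoder
 * would therefore index the table roughly as below; the helper name and
 * the exact prefix-to-column encoding are assumptions for illustration.
 */
#if 0 /* example only, kept out of the build */
DECLINLINE(PFNIEMOP) iemExampleLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix)
{
    /* idxPrefix: 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2 (assumed encoding). */
    Assert(idxPrefix < 4);
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif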

/** @} */
