VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 96789

Last change on this file was 96789, checked in by vboxsync, 2 years ago:

VMM/IEM: Implement cmpps/cmpss/cmppd/cmpsd instructions, bugref:9898
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96789 2022-09-19 13:04:06Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *     pxxx mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
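
/*
 * Illustrative sketch, not part of the original file: opcode handlers built
 * on the worker above are typically one-line forwards.  The worker name
 * iemAImpl_paddb_u64 is an assumption made for this example.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq_sketch)
{
    IEMOP_MNEMONIC(paddb, "paddb Pq,Qq"); /* paddb mm, mm/m64 (0x0f 0xfc) */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}
#endif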


/**
 * Common worker for MMX instructions of the form:
 *     pxxx mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
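
/*
 * For orientation (paraphrased, so treat the exact signatures as an
 * assumption): the two worker-pointer types above differ only in the
 * leading FPU-state argument.
 *     FNIEMAIMPLMEDIAF2U64:    fn(PCX86FXSTATE pFpuState, uint64_t *puDst, uint64_t const *puSrc)
 *     FNIEMAIMPLMEDIAOPTF2U64: fn(uint64_t *puDst, uint64_t const *puSrc)
 */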


/**
 * Common worker for MMX instructions of the form:
 *     pxxx mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *     pxxx mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function
 * takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *     pxxx mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *     pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *     pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *     pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
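
/*
 * Illustrative sketch: punpcklbw mm, mm/m32 (0x0f 0x60) is a classic user of
 * the LowLow worker above; the worker name iemAImpl_punpcklbw_u64 is an
 * assumption made for this example.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd_sketch)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq,Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}
#endif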


/**
 * Common worker for SSE instructions of the form:
 *     pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that for SSE may read the full 128 bits or
 * only the low 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *     pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that for SSE may read the full 128 bits or
 * only the low 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *     pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel docs this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
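
/*
 * Illustrative sketch: punpckhbw mm, mm/m64 (0x0f 0x68) is a classic user of
 * the HighHigh worker above; the worker name iemAImpl_punpckhbw_u64 is an
 * assumption made for this example.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq_sketch)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq,Qq");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}
#endif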


/**
 * Common worker for SSE instructions of the form:
 *     pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions of the form:
 *     pxxs xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
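
/*
 * Illustrative sketch: packed single-precision arithmetic such as addps
 * xmm, xmm/m128 (0x0f 0x58) fits the worker above; the IEMSSERESULT local
 * carries the result value plus the updated MXCSR back for the store.  The
 * worker name iemAImpl_addps_u128 is an assumption made for this example.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_addps_Vps_Wps_sketch)
{
    IEMOP_MNEMONIC(addps, "addps Vps,Wps");
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}
#endif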


/**
 * Common worker for SSE instructions of the form:
 *     pxxs xmm1, xmm2/mem32
 *
 * Only a 32-bit scalar is read from memory, so no 128-bit alignment is
 * enforced on the memory operand (see IEM_MC_FETCH_MEM_R32 below).
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
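
/*
 * Illustrative sketch: scalar single-precision instructions such as addss
 * xmm, xmm/m32 (0xf3 0x0f 0x58) use this R32 worker; the worker name
 * iemAImpl_addss_u128_r32 is an assumption made for this example.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_addss_Vss_Wss_sketch)
{
    IEMOP_MNEMONIC(addss, "addss Vss,Wss");
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}
#endif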


/**
 * Common worker for SSE2 instructions of the form:
 *     pxxd xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *     pxxs xmm1, xmm2/mem64
 *
 * Only a 64-bit scalar is read from memory, so no 128-bit alignment is
 * enforced on the memory operand (see IEM_MC_FETCH_MEM_R64 below).
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
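
/*
 * Illustrative sketch: scalar double-precision instructions such as addsd
 * xmm, xmm/m64 (0xf2 0x0f 0x58) use this R64 worker; the worker name
 * iemAImpl_addsd_u128_r64 is an assumption made for this example.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd_sketch)
{
    IEMOP_MNEMONIC(addsd, "addsd Vsd,Wsd");
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}
#endif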


/**
 * Common worker for SSE2 instructions of the form:
 *     pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE3 instructions of the form:
 *     hxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
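
/*
 * Illustrative sketch: the SSE3 horizontal ops, e.g. haddps xmm, xmm/m128
 * (0xf2 0x0f 0x7c), go through the worker above; the worker name
 * iemAImpl_haddps_u128 is an assumption made for this example.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps_sketch)
{
    IEMOP_MNEMONIC(haddps, "haddps Vps,Wps");
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}
#endif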


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
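
/*
 * Decode example: "str ax" encodes as 0x0f 0x00 0xc8.  The ModRM byte 0xc8
 * is mod=3, reg=1, rm=0, so the table above dispatches to iemOp_Grp6_str
 * with AX as the destination register.
 */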
1322
1323
1324/** Opcode 0x0f 0x01 /0. */
1325FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
1326{
1327 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
1328 IEMOP_HLP_MIN_286();
1329 IEMOP_HLP_64BIT_OP_SIZE();
1330 IEM_MC_BEGIN(2, 1);
1331 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1332 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1335 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1336 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
1337 IEM_MC_END();
1338 return VINF_SUCCESS;
1339}
1340
1341
1342/** Opcode 0x0f 0x01 /0. */
1343FNIEMOP_DEF(iemOp_Grp7_vmcall)
1344{
1345 IEMOP_MNEMONIC(vmcall, "vmcall");
1346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
1347
1348 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1349 want all hypercalls regardless of instruction used, and if a
1350 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1351 (NEM/win makes ASSUMPTIONS about this behavior.) */
1352 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
1353}
1354
1355
1356/** Opcode 0x0f 0x01 /0. */
1357#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1358FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1359{
1360 IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
1361 IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
1362 IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
1363 IEMOP_HLP_DONE_DECODING();
1364 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
1365}
1366#else
1367FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1368{
1369 IEMOP_BITCH_ABOUT_STUB();
1370 return IEMOP_RAISE_INVALID_OPCODE();
1371}
1372#endif
1373
1374
1375/** Opcode 0x0f 0x01 /0. */
1376#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1377FNIEMOP_DEF(iemOp_Grp7_vmresume)
1378{
1379 IEMOP_MNEMONIC(vmresume, "vmresume");
1380 IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
1381 IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
1382 IEMOP_HLP_DONE_DECODING();
1383 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
1384}
1385#else
1386FNIEMOP_DEF(iemOp_Grp7_vmresume)
1387{
1388 IEMOP_BITCH_ABOUT_STUB();
1389 return IEMOP_RAISE_INVALID_OPCODE();
1390}
1391#endif
1392
1393
1394/** Opcode 0x0f 0x01 /0. */
1395#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1396FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1397{
1398 IEMOP_MNEMONIC(vmxoff, "vmxoff");
1399 IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
1400 IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
1401 IEMOP_HLP_DONE_DECODING();
1402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
1403}
1404#else
1405FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1406{
1407 IEMOP_BITCH_ABOUT_STUB();
1408 return IEMOP_RAISE_INVALID_OPCODE();
1409}
1410#endif
1411
1412
1413/** Opcode 0x0f 0x01 /1. */
1414FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
1415{
1416 IEMOP_MNEMONIC(sidt, "sidt Ms");
1417 IEMOP_HLP_MIN_286();
1418 IEMOP_HLP_64BIT_OP_SIZE();
1419 IEM_MC_BEGIN(2, 1);
1420 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1421 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1424 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1425 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
1426 IEM_MC_END();
1427 return VINF_SUCCESS;
1428}
1429
1430
1431/** Opcode 0x0f 0x01 /1. */
1432FNIEMOP_DEF(iemOp_Grp7_monitor)
1433{
1434 IEMOP_MNEMONIC(monitor, "monitor");
1435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
1436 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
1437}
1438
1439
1440/** Opcode 0x0f 0x01 /1. */
1441FNIEMOP_DEF(iemOp_Grp7_mwait)
1442{
1443 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
1444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1445 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
1446}
1447
1448
1449/** Opcode 0x0f 0x01 /2. */
1450FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
1451{
1452 IEMOP_MNEMONIC(lgdt, "lgdt");
1453 IEMOP_HLP_64BIT_OP_SIZE();
1454 IEM_MC_BEGIN(3, 1);
1455 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1456 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1457 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
1458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1460 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1461 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1462 IEM_MC_END();
1463 return VINF_SUCCESS;
1464}
1465
1466
1467/** Opcode 0x0f 0x01 0xd0. */
1468FNIEMOP_DEF(iemOp_Grp7_xgetbv)
1469{
1470 IEMOP_MNEMONIC(xgetbv, "xgetbv");
1471 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1472 {
1473 /** @todo r=ramshankar: We should use
1474 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
1475 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
1476 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1477 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
1478 }
1479 return IEMOP_RAISE_INVALID_OPCODE();
1480}
1481
1482
1483/** Opcode 0x0f 0x01 0xd1. */
1484FNIEMOP_DEF(iemOp_Grp7_xsetbv)
1485{
1486 IEMOP_MNEMONIC(xsetbv, "xsetbv");
1487 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1488 {
1489 /** @todo r=ramshankar: We should use
1490 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
1491 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
1492 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1493 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
1494 }
1495 return IEMOP_RAISE_INVALID_OPCODE();
1496}
1497
1498
1499/** Opcode 0x0f 0x01 /3. */
1500FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
1501{
1502 IEMOP_MNEMONIC(lidt, "lidt");
1503 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
1504 ? IEMMODE_64BIT
1505 : pVCpu->iem.s.enmEffOpSize;
1506 IEM_MC_BEGIN(3, 1);
1507 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1508 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1509 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
1510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1512 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1513 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1514 IEM_MC_END();
1515 return VINF_SUCCESS;
1516}
1517
1518
1519/** Opcode 0x0f 0x01 0xd8. */
1520#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1521FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
1522{
1523 IEMOP_MNEMONIC(vmrun, "vmrun");
1524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1525 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
1526}
1527#else
1528FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1529#endif
1530
1531/** Opcode 0x0f 0x01 0xd9. */
1532FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
1533{
1534 IEMOP_MNEMONIC(vmmcall, "vmmcall");
1535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1536
1537 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1538 want all hypercalls regardless of instruction used, and if a
1539 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1540 (NEM/win makes ASSUMPTIONS about this behavior.) */
1541 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
1542}
1543
1544/** Opcode 0x0f 0x01 0xda. */
1545#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1546FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1547{
1548 IEMOP_MNEMONIC(vmload, "vmload");
1549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1550 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
1551}
1552#else
1553FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1554#endif
1555
1556
1557/** Opcode 0x0f 0x01 0xdb. */
1558#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1559FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
1560{
1561 IEMOP_MNEMONIC(vmsave, "vmsave");
1562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1563 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
1564}
1565#else
1566FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1567#endif
1568
1569
1570/** Opcode 0x0f 0x01 0xdc. */
1571#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1572FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
1573{
1574 IEMOP_MNEMONIC(stgi, "stgi");
1575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1576 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
1577}
1578#else
1579FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1580#endif
1581
1582
1583/** Opcode 0x0f 0x01 0xdd. */
1584#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1585FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
1586{
1587 IEMOP_MNEMONIC(clgi, "clgi");
1588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1589 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
1590}
1591#else
1592FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1593#endif
1594
1595
1596/** Opcode 0x0f 0x01 0xdf. */
1597#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1598FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
1599{
1600 IEMOP_MNEMONIC(invlpga, "invlpga");
1601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1602 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
1603}
1604#else
1605FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1606#endif
1607
1608
1609/** Opcode 0x0f 0x01 0xde. */
1610#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1611FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
1612{
1613 IEMOP_MNEMONIC(skinit, "skinit");
1614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1615 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
1616}
1617#else
1618FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1619#endif
1620
1621
1622/** Opcode 0x0f 0x01 /4. */
1623FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1624{
1625 IEMOP_MNEMONIC(smsw, "smsw");
1626 IEMOP_HLP_MIN_286();
1627 if (IEM_IS_MODRM_REG_MODE(bRm))
1628 {
1629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1630 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1631 }
1632
1633 /* Ignore operand size here, memory refs are always 16-bit. */
1634 IEM_MC_BEGIN(2, 0);
1635 IEM_MC_ARG(uint16_t, iEffSeg, 0);
1636 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1639 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1640 IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1641 IEM_MC_END();
1642 return VINF_SUCCESS;
1643}
1644
1645
1646/** Opcode 0x0f 0x01 /6. */
1647FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1648{
1649 /* The operand size is effectively ignored, all is 16-bit and only the
1650 lower 3-bits are used. */
1651 IEMOP_MNEMONIC(lmsw, "lmsw");
1652 IEMOP_HLP_MIN_286();
1653 if (IEM_IS_MODRM_REG_MODE(bRm))
1654 {
1655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1656 IEM_MC_BEGIN(2, 0);
1657 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1658 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1659 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1660 IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1661 IEM_MC_END();
1662 }
1663 else
1664 {
1665 IEM_MC_BEGIN(2, 0);
1666 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1667 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1670 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1671 IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1672 IEM_MC_END();
1673 }
1674 return VINF_SUCCESS;
1675}
1676
1677
1678/** Opcode 0x0f 0x01 /7. */
1679FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1680{
1681 IEMOP_MNEMONIC(invlpg, "invlpg");
1682 IEMOP_HLP_MIN_486();
1683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1684 IEM_MC_BEGIN(1, 1);
1685 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1687 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1688 IEM_MC_END();
1689 return VINF_SUCCESS;
1690}
1691
1692
1693/** Opcode 0x0f 0x01 /7. */
1694FNIEMOP_DEF(iemOp_Grp7_swapgs)
1695{
1696 IEMOP_MNEMONIC(swapgs, "swapgs");
1697 IEMOP_HLP_ONLY_64BIT();
1698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1699 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
1700}
1701
1702
1703/** Opcode 0x0f 0x01 /7. */
1704FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1705{
1706 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1708 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
1709}
1710
1711
1712/**
1713 * Group 7 jump table, memory variant.
1714 */
1715IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1716{
1717 iemOp_Grp7_sgdt,
1718 iemOp_Grp7_sidt,
1719 iemOp_Grp7_lgdt,
1720 iemOp_Grp7_lidt,
1721 iemOp_Grp7_smsw,
1722 iemOp_InvalidWithRM,
1723 iemOp_Grp7_lmsw,
1724 iemOp_Grp7_invlpg
1725};
1726
1727
1728/** Opcode 0x0f 0x01. */
1729FNIEMOP_DEF(iemOp_Grp7)
1730{
1731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1732 if (IEM_IS_MODRM_MEM_MODE(bRm))
1733 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1734
1735 switch (IEM_GET_MODRM_REG_8(bRm))
1736 {
1737 case 0:
1738 switch (IEM_GET_MODRM_RM_8(bRm))
1739 {
1740 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1741 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1742 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1743 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1744 }
1745 return IEMOP_RAISE_INVALID_OPCODE();
1746
1747 case 1:
1748 switch (IEM_GET_MODRM_RM_8(bRm))
1749 {
1750 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1751 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1752 }
1753 return IEMOP_RAISE_INVALID_OPCODE();
1754
1755 case 2:
1756 switch (IEM_GET_MODRM_RM_8(bRm))
1757 {
1758 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1759 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1760 }
1761 return IEMOP_RAISE_INVALID_OPCODE();
1762
1763 case 3:
1764 switch (IEM_GET_MODRM_RM_8(bRm))
1765 {
1766 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1767 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1768 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1769 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1770 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1771 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1772 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1773 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1775 }
1776
1777 case 4:
1778 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1779
1780 case 5:
1781 return IEMOP_RAISE_INVALID_OPCODE();
1782
1783 case 6:
1784 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1785
1786 case 7:
1787 switch (IEM_GET_MODRM_RM_8(bRm))
1788 {
1789 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1790 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1791 }
1792 return IEMOP_RAISE_INVALID_OPCODE();
1793
1794 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1795 }
1796}
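
/*
 * For reference, the ModRM slicing behind the dispatch above, as a sketch
 * (IEM_GET_MODRM_REG_8 / IEM_GET_MODRM_RM_8 are the real accessors; the
 * helper below is hypothetical):
 */
#if 0 /* illustrative sketch, not built */
static void grp7SketchDecode(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = bRm >> 6;        /* 3 = register operand, 0..2 = memory operand. */
    *pbReg = (bRm >> 3) & 7;  /* the /0../7 group index (sgdt..invlpg). */
    *pbRm  = bRm & 7;         /* sub-selector in register mode (vmcall, swapgs, ...). */
}
#endif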
1797
1798/** Opcode 0x0f 0x00 /3. */
1799FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1800{
1801 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1803
1804 if (IEM_IS_MODRM_REG_MODE(bRm))
1805 {
1806 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1807 switch (pVCpu->iem.s.enmEffOpSize)
1808 {
1809 case IEMMODE_16BIT:
1810 {
1811 IEM_MC_BEGIN(3, 0);
1812 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1813 IEM_MC_ARG(uint16_t, u16Sel, 1);
1814 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1815
1816 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1817 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1818 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1819
1820 IEM_MC_END();
1821 return VINF_SUCCESS;
1822 }
1823
1824 case IEMMODE_32BIT:
1825 case IEMMODE_64BIT:
1826 {
1827 IEM_MC_BEGIN(3, 0);
1828 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1829 IEM_MC_ARG(uint16_t, u16Sel, 1);
1830 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1831
1832 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1833 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1834 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1835
1836 IEM_MC_END();
1837 return VINF_SUCCESS;
1838 }
1839
1840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1841 }
1842 }
1843 else
1844 {
1845 switch (pVCpu->iem.s.enmEffOpSize)
1846 {
1847 case IEMMODE_16BIT:
1848 {
1849 IEM_MC_BEGIN(3, 1);
1850 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1851 IEM_MC_ARG(uint16_t, u16Sel, 1);
1852 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1854
1855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1856 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1857
1858 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1859 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1860 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1861
1862 IEM_MC_END();
1863 return VINF_SUCCESS;
1864 }
1865
1866 case IEMMODE_32BIT:
1867 case IEMMODE_64BIT:
1868 {
1869 IEM_MC_BEGIN(3, 1);
1870 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1871 IEM_MC_ARG(uint16_t, u16Sel, 1);
1872 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1874
1875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1876 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1877/** @todo testcase: make sure it's a 16-bit read. */
1878
1879 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1880 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1881 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1882
1883 IEM_MC_END();
1884 return VINF_SUCCESS;
1885 }
1886
1887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1888 }
1889 }
1890}
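
/*
 * What the shared worker above boils down to, assuming the SDM description
 * of LAR/LSL (iemCImpl_LarLsl_u16/u64 hold the real logic; the names and the
 * access-rights mask here are illustrative): on success the destination and
 * ZF=1 are written, on a rejected selector only ZF=0 is.
 */
#if 0 /* illustrative sketch, not built */
static void larLslSketch(bool fIsLar, bool fSelOk, uint32_t uAttr, uint32_t cbLimit,
                         uint32_t *puDst, uint32_t *pfEFlags)
{
    if (fSelOk)
    {
        *puDst     = fIsLar ? (uAttr & UINT32_C(0x00ffff00)) /* access rights */ : cbLimit;
        *pfEFlags |= 1 << 6;                                 /* ZF=1 */
    }
    else
        *pfEFlags &= ~(uint32_t)(1 << 6);                    /* ZF=0, dest untouched */
}
#endif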
1891
1892
1893
1894/** Opcode 0x0f 0x02. */
1895FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1896{
1897 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1898 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1899}
1900
1901
1902/** Opcode 0x0f 0x03. */
1903FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1904{
1905 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1906 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1907}
1908
1909
1910/** Opcode 0x0f 0x05. */
1911FNIEMOP_DEF(iemOp_syscall)
1912{
1913 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1915 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1916}
1917
1918
1919/** Opcode 0x0f 0x06. */
1920FNIEMOP_DEF(iemOp_clts)
1921{
1922 IEMOP_MNEMONIC(clts, "clts");
1923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1924 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1925}
1926
1927
1928/** Opcode 0x0f 0x07. */
1929FNIEMOP_DEF(iemOp_sysret)
1930{
1931 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1933 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1934}
1935
1936
1937/** Opcode 0x0f 0x08. */
1938FNIEMOP_DEF(iemOp_invd)
1939{
1940 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1941 IEMOP_HLP_MIN_486();
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
1944}
1945
1946
1947/** Opcode 0x0f 0x09. */
1948FNIEMOP_DEF(iemOp_wbinvd)
1949{
1950 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1951 IEMOP_HLP_MIN_486();
1952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1953 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
1954}
1955
1956
1957/** Opcode 0x0f 0x0b. */
1958FNIEMOP_DEF(iemOp_ud2)
1959{
1960 IEMOP_MNEMONIC(ud2, "ud2");
1961 return IEMOP_RAISE_INVALID_OPCODE();
1962}
1963
1964/** Opcode 0x0f 0x0d. */
1965FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1966{
1967 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1968 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1969 {
1970 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1971 return IEMOP_RAISE_INVALID_OPCODE();
1972 }
1973
1974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1975 if (IEM_IS_MODRM_REG_MODE(bRm))
1976 {
1977 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1978 return IEMOP_RAISE_INVALID_OPCODE();
1979 }
1980
1981 switch (IEM_GET_MODRM_REG_8(bRm))
1982 {
1983 case 2: /* Aliased to /0 for the time being. */
1984 case 4: /* Aliased to /0 for the time being. */
1985 case 5: /* Aliased to /0 for the time being. */
1986 case 6: /* Aliased to /0 for the time being. */
1987 case 7: /* Aliased to /0 for the time being. */
1988 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1989 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1990 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1992 }
1993
1994 IEM_MC_BEGIN(0, 1);
1995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1998 /* Currently a NOP. */
1999 NOREF(GCPtrEffSrc);
2000 IEM_MC_ADVANCE_RIP();
2001 IEM_MC_END();
2002 return VINF_SUCCESS;
2003}
2004
2005
2006/** Opcode 0x0f 0x0e. */
2007FNIEMOP_DEF(iemOp_femms)
2008{
2009 IEMOP_MNEMONIC(femms, "femms");
2010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2011
2012 IEM_MC_BEGIN(0,0);
2013 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2014 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2015 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2016 IEM_MC_FPU_FROM_MMX_MODE();
2017 IEM_MC_ADVANCE_RIP();
2018 IEM_MC_END();
2019 return VINF_SUCCESS;
2020}
2021
2022
2023/** Opcode 0x0f 0x0f. */
2024FNIEMOP_DEF(iemOp_3Dnow)
2025{
2026 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2027 {
2028 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2029 return IEMOP_RAISE_INVALID_OPCODE();
2030 }
2031
2032#ifdef IEM_WITH_3DNOW
2033 /* This is pretty sparse, use switch instead of table. */
2034 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2035 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2036#else
2037 IEMOP_BITCH_ABOUT_STUB();
2038 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2039#endif
2040}
2041
2042
2043/**
2044 * @opcode 0x10
2045 * @oppfx none
2046 * @opcpuid sse
2047 * @opgroup og_sse_simdfp_datamove
2048 * @opxcpttype 4UA
2049 * @optest op1=1 op2=2 -> op1=2
2050 * @optest op1=0 op2=-22 -> op1=-22
2051 */
2052FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2053{
2054 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if (IEM_IS_MODRM_REG_MODE(bRm))
2057 {
2058 /*
2059 * Register, register.
2060 */
2061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2062 IEM_MC_BEGIN(0, 0);
2063 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2064 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2065 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2066 IEM_GET_MODRM_RM(pVCpu, bRm));
2067 IEM_MC_ADVANCE_RIP();
2068 IEM_MC_END();
2069 }
2070 else
2071 {
2072 /*
2073 * Register, memory.
2074 */
2075 IEM_MC_BEGIN(0, 2);
2076 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2081 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2083
2084 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP();
2088 IEM_MC_END();
2089 }
2090 return VINF_SUCCESS;
2091
2092}
2093
2094
2095/**
2096 * @opcode 0x10
2097 * @oppfx 0x66
2098 * @opcpuid sse2
2099 * @opgroup og_sse2_pcksclr_datamove
2100 * @opxcpttype 4UA
2101 * @optest op1=1 op2=2 -> op1=2
2102 * @optest op1=0 op2=-42 -> op1=-42
2103 */
2104FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2105{
2106 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2108 if (IEM_IS_MODRM_REG_MODE(bRm))
2109 {
2110 /*
2111 * Register, register.
2112 */
2113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2114 IEM_MC_BEGIN(0, 0);
2115 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2116 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2117 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2118 IEM_GET_MODRM_RM(pVCpu, bRm));
2119 IEM_MC_ADVANCE_RIP();
2120 IEM_MC_END();
2121 }
2122 else
2123 {
2124 /*
2125 * Register, memory.
2126 */
2127 IEM_MC_BEGIN(0, 2);
2128 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2130
2131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2133 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2135
2136 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2137 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2138
2139 IEM_MC_ADVANCE_RIP();
2140 IEM_MC_END();
2141 }
2142 return VINF_SUCCESS;
2143}
2144
2145
2146/**
2147 * @opcode 0x10
2148 * @oppfx 0xf3
2149 * @opcpuid sse
2150 * @opgroup og_sse_simdfp_datamove
2151 * @opxcpttype 5
2152 * @optest op1=1 op2=2 -> op1=2
2153 * @optest op1=0 op2=-22 -> op1=-22
2154 */
2155FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2156{
2157 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2159 if (IEM_IS_MODRM_REG_MODE(bRm))
2160 {
2161 /*
2162 * Register, register.
2163 */
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 IEM_MC_BEGIN(0, 1);
2166 IEM_MC_LOCAL(uint32_t, uSrc);
2167
2168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2169 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2170 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2171 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2172
2173 IEM_MC_ADVANCE_RIP();
2174 IEM_MC_END();
2175 }
2176 else
2177 {
2178 /*
2179 * Register, memory.
2180 */
2181 IEM_MC_BEGIN(0, 2);
2182 IEM_MC_LOCAL(uint32_t, uSrc);
2183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2184
2185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2187 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2188 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2189
2190 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2191 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2192
2193 IEM_MC_ADVANCE_RIP();
2194 IEM_MC_END();
2195 }
2196 return VINF_SUCCESS;
2197}
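
/*
 * The asymmetry between the two movss load paths above is easy to miss, so
 * here it is as a sketch (assumed semantics per the SDM): register-to-register
 * merges into the low dword, while a memory load zero-extends to the full
 * 128 bits. Type and helper names are illustrative only.
 */
#if 0 /* illustrative sketch, not built */
typedef struct { uint32_t au32[4]; } MOVSSSKETCHXREG;
static void movssSketchRegReg(MOVSSSKETCHXREG *pDst, MOVSSSKETCHXREG const *pSrc)
{
    pDst->au32[0] = pSrc->au32[0];   /* au32[1..3] of the destination survive */
}
static void movssSketchRegMem(MOVSSSKETCHXREG *pDst, uint32_t u32Mem)
{
    pDst->au32[0] = u32Mem;
    pDst->au32[1] = pDst->au32[2] = pDst->au32[3] = 0; /* zero-extended */
}
#endif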
2198
2199
2200/**
2201 * @opcode 0x10
2202 * @oppfx 0xf2
2203 * @opcpuid sse2
2204 * @opgroup og_sse2_pcksclr_datamove
2205 * @opxcpttype 5
2206 * @optest op1=1 op2=2 -> op1=2
2207 * @optest op1=0 op2=-42 -> op1=-42
2208 */
2209FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2210{
2211 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 if (IEM_IS_MODRM_REG_MODE(bRm))
2214 {
2215 /*
2216 * Register, register.
2217 */
2218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2219 IEM_MC_BEGIN(0, 1);
2220 IEM_MC_LOCAL(uint64_t, uSrc);
2221
2222 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2223 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2224 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2225 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2226
2227 IEM_MC_ADVANCE_RIP();
2228 IEM_MC_END();
2229 }
2230 else
2231 {
2232 /*
2233 * Register, memory.
2234 */
2235 IEM_MC_BEGIN(0, 2);
2236 IEM_MC_LOCAL(uint64_t, uSrc);
2237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2238
2239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2243
2244 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2245 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2246
2247 IEM_MC_ADVANCE_RIP();
2248 IEM_MC_END();
2249 }
2250 return VINF_SUCCESS;
2251}
2252
2253
2254/**
2255 * @opcode 0x11
2256 * @oppfx none
2257 * @opcpuid sse
2258 * @opgroup og_sse_simdfp_datamove
2259 * @opxcpttype 4UA
2260 * @optest op1=1 op2=2 -> op1=2
2261 * @optest op1=0 op2=-42 -> op1=-42
2262 */
2263FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2264{
2265 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2267 if (IEM_IS_MODRM_REG_MODE(bRm))
2268 {
2269 /*
2270 * Register, register.
2271 */
2272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2273 IEM_MC_BEGIN(0, 0);
2274 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2275 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2276 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2277 IEM_GET_MODRM_REG(pVCpu, bRm));
2278 IEM_MC_ADVANCE_RIP();
2279 IEM_MC_END();
2280 }
2281 else
2282 {
2283 /*
2284 * Memory, register.
2285 */
2286 IEM_MC_BEGIN(0, 2);
2287 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2289
2290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2292 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2294
2295 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2296 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2297
2298 IEM_MC_ADVANCE_RIP();
2299 IEM_MC_END();
2300 }
2301 return VINF_SUCCESS;
2302}
2303
2304
2305/**
2306 * @opcode 0x11
2307 * @oppfx 0x66
2308 * @opcpuid sse2
2309 * @opgroup og_sse2_pcksclr_datamove
2310 * @opxcpttype 4UA
2311 * @optest op1=1 op2=2 -> op1=2
2312 * @optest op1=0 op2=-42 -> op1=-42
2313 */
2314FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2315{
2316 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2318 if (IEM_IS_MODRM_REG_MODE(bRm))
2319 {
2320 /*
2321 * Register, register.
2322 */
2323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2324 IEM_MC_BEGIN(0, 0);
2325 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2326 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2327 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2328 IEM_GET_MODRM_REG(pVCpu, bRm));
2329 IEM_MC_ADVANCE_RIP();
2330 IEM_MC_END();
2331 }
2332 else
2333 {
2334 /*
2335 * Memory, register.
2336 */
2337 IEM_MC_BEGIN(0, 2);
2338 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2340
2341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2343 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2344 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2345
2346 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2347 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2348
2349 IEM_MC_ADVANCE_RIP();
2350 IEM_MC_END();
2351 }
2352 return VINF_SUCCESS;
2353}
2354
2355
2356/**
2357 * @opcode 0x11
2358 * @oppfx 0xf3
2359 * @opcpuid sse
2360 * @opgroup og_sse_simdfp_datamove
2361 * @opxcpttype 5
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-22 -> op1=-22
2364 */
2365FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2366{
2367 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2369 if (IEM_IS_MODRM_REG_MODE(bRm))
2370 {
2371 /*
2372 * Register, register.
2373 */
2374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2375 IEM_MC_BEGIN(0, 1);
2376 IEM_MC_LOCAL(uint32_t, uSrc);
2377
2378 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2380 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2381 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2382
2383 IEM_MC_ADVANCE_RIP();
2384 IEM_MC_END();
2385 }
2386 else
2387 {
2388 /*
2389 * Memory, register.
2390 */
2391 IEM_MC_BEGIN(0, 2);
2392 IEM_MC_LOCAL(uint32_t, uSrc);
2393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2394
2395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2397 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2398 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2399
2400 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2401 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2402
2403 IEM_MC_ADVANCE_RIP();
2404 IEM_MC_END();
2405 }
2406 return VINF_SUCCESS;
2407}
2408
2409
2410/**
2411 * @opcode 0x11
2412 * @oppfx 0xf2
2413 * @opcpuid sse2
2414 * @opgroup og_sse2_pcksclr_datamove
2415 * @opxcpttype 5
2416 * @optest op1=1 op2=2 -> op1=2
2417 * @optest op1=0 op2=-42 -> op1=-42
2418 */
2419FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2420{
2421 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2423 if (IEM_IS_MODRM_REG_MODE(bRm))
2424 {
2425 /*
2426 * Register, register.
2427 */
2428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2429 IEM_MC_BEGIN(0, 1);
2430 IEM_MC_LOCAL(uint64_t, uSrc);
2431
2432 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2433 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2434 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2435 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 else
2441 {
2442 /*
2443 * Memory, register.
2444 */
2445 IEM_MC_BEGIN(0, 2);
2446 IEM_MC_LOCAL(uint64_t, uSrc);
2447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2448
2449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2453
2454 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2455 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2456
2457 IEM_MC_ADVANCE_RIP();
2458 IEM_MC_END();
2459 }
2460 return VINF_SUCCESS;
2461}
2462
2463
2464FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2465{
2466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2467 if (IEM_IS_MODRM_REG_MODE(bRm))
2468 {
2469 /**
2470 * @opcode 0x12
2471 * @opcodesub 11 mr/reg
2472 * @oppfx none
2473 * @opcpuid sse
2474 * @opgroup og_sse_simdfp_datamove
2475 * @opxcpttype 5
2476 * @optest op1=1 op2=2 -> op1=2
2477 * @optest op1=0 op2=-42 -> op1=-42
2478 */
2479 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2480
2481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2482 IEM_MC_BEGIN(0, 1);
2483 IEM_MC_LOCAL(uint64_t, uSrc);
2484
2485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2487 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2488 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2489
2490 IEM_MC_ADVANCE_RIP();
2491 IEM_MC_END();
2492 }
2493 else
2494 {
2495 /**
2496 * @opdone
2497 * @opcode 0x12
2498 * @opcodesub !11 mr/reg
2499 * @oppfx none
2500 * @opcpuid sse
2501 * @opgroup og_sse_simdfp_datamove
2502 * @opxcpttype 5
2503 * @optest op1=1 op2=2 -> op1=2
2504 * @optest op1=0 op2=-42 -> op1=-42
2505 * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
2506 */
2507 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2508
2509 IEM_MC_BEGIN(0, 2);
2510 IEM_MC_LOCAL(uint64_t, uSrc);
2511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2512
2513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2515 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2517
2518 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2519 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2520
2521 IEM_MC_ADVANCE_RIP();
2522 IEM_MC_END();
2523 }
2524 return VINF_SUCCESS;
2525}
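
/*
 * Data movement of the two forms above in one sketch (assumed per the SDM):
 * movhlps copies the high qword of the source register to the low qword of
 * the destination, movlps loads the low qword from memory; both leave the
 * destination's high qword alone. Helper name is illustrative only.
 */
#if 0 /* illustrative sketch, not built */
static void movhlpsSketch(uint64_t au64Dst[2], uint64_t const au64Src[2])
{
    au64Dst[0] = au64Src[1];    /* au64Dst[1] is preserved */
}
#endif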
2526
2527
2528/**
2529 * @opcode 0x12
2530 * @opcodesub !11 mr/reg
2531 * @oppfx 0x66
2532 * @opcpuid sse2
2533 * @opgroup og_sse2_pcksclr_datamove
2534 * @opxcpttype 5
2535 * @optest op1=1 op2=2 -> op1=2
2536 * @optest op1=0 op2=-42 -> op1=-42
2537 */
2538FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2539{
2540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2541 if (IEM_IS_MODRM_MEM_MODE(bRm))
2542 {
2543 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2544
2545 IEM_MC_BEGIN(0, 2);
2546 IEM_MC_LOCAL(uint64_t, uSrc);
2547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2548
2549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2551 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2552 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2553
2554 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2555 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2556
2557 IEM_MC_ADVANCE_RIP();
2558 IEM_MC_END();
2559 return VINF_SUCCESS;
2560 }
2561
2562 /**
2563 * @opdone
2564 * @opmnemonic ud660f12m3
2565 * @opcode 0x12
2566 * @opcodesub 11 mr/reg
2567 * @oppfx 0x66
2568 * @opunused immediate
2569 * @opcpuid sse
2570 * @optest ->
2571 */
2572 return IEMOP_RAISE_INVALID_OPCODE();
2573}
2574
2575
2576/**
2577 * @opcode 0x12
2578 * @oppfx 0xf3
2579 * @opcpuid sse3
2580 * @opgroup og_sse3_pcksclr_datamove
2581 * @opxcpttype 4
2582 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2583 * op1=0x00000002000000020000000100000001
2584 */
2585FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2586{
2587 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2589 if (IEM_IS_MODRM_REG_MODE(bRm))
2590 {
2591 /*
2592 * Register, register.
2593 */
2594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2595 IEM_MC_BEGIN(2, 0);
2596 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2597 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2598
2599 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2600 IEM_MC_PREPARE_SSE_USAGE();
2601
2602 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2603 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2604 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2605
2606 IEM_MC_ADVANCE_RIP();
2607 IEM_MC_END();
2608 }
2609 else
2610 {
2611 /*
2612 * Register, memory.
2613 */
2614 IEM_MC_BEGIN(2, 2);
2615 IEM_MC_LOCAL(RTUINT128U, uSrc);
2616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2617 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2618 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2619
2620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2622 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2623 IEM_MC_PREPARE_SSE_USAGE();
2624
2625 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2626 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2627 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2628
2629 IEM_MC_ADVANCE_RIP();
2630 IEM_MC_END();
2631 }
2632 return VINF_SUCCESS;
2633}
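
/*
 * The shuffle iemAImpl_movsldup performs, sketched from the SDM definition:
 * each even-numbered source dword is duplicated into a destination pair
 * (compare the @optest values above). Helper name is illustrative only.
 */
#if 0 /* illustrative sketch, not built */
static void movsldupSketch(uint32_t au32Dst[4], uint32_t const au32Src[4])
{
    au32Dst[0] = au32Dst[1] = au32Src[0];
    au32Dst[2] = au32Dst[3] = au32Src[2];
}
#endif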
2634
2635
2636/**
2637 * @opcode 0x12
2638 * @oppfx 0xf2
2639 * @opcpuid sse3
2640 * @opgroup og_sse3_pcksclr_datamove
2641 * @opxcpttype 5
2642 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2643 * op1=0x22222222111111112222222211111111
2644 */
2645FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2646{
2647 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2649 if (IEM_IS_MODRM_REG_MODE(bRm))
2650 {
2651 /*
2652 * Register, register.
2653 */
2654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2655 IEM_MC_BEGIN(2, 0);
2656 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2657 IEM_MC_ARG(uint64_t, uSrc, 1);
2658
2659 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2660 IEM_MC_PREPARE_SSE_USAGE();
2661
2662 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2663 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2664 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2665
2666 IEM_MC_ADVANCE_RIP();
2667 IEM_MC_END();
2668 }
2669 else
2670 {
2671 /*
2672 * Register, memory.
2673 */
2674 IEM_MC_BEGIN(2, 2);
2675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2676 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2677 IEM_MC_ARG(uint64_t, uSrc, 1);
2678
2679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2681 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2682 IEM_MC_PREPARE_SSE_USAGE();
2683
2684 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2685 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2686 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2687
2688 IEM_MC_ADVANCE_RIP();
2689 IEM_MC_END();
2690 }
2691 return VINF_SUCCESS;
2692}
2693
2694
2695/**
2696 * @opcode 0x13
2697 * @opcodesub !11 mr/reg
2698 * @oppfx none
2699 * @opcpuid sse
2700 * @opgroup og_sse_simdfp_datamove
2701 * @opxcpttype 5
2702 * @optest op1=1 op2=2 -> op1=2
2703 * @optest op1=0 op2=-42 -> op1=-42
2704 */
2705FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2706{
2707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2708 if (IEM_IS_MODRM_MEM_MODE(bRm))
2709 {
2710 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2711
2712 IEM_MC_BEGIN(0, 2);
2713 IEM_MC_LOCAL(uint64_t, uSrc);
2714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2715
2716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2718 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2719 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2720
2721 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2722 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2723
2724 IEM_MC_ADVANCE_RIP();
2725 IEM_MC_END();
2726 return VINF_SUCCESS;
2727 }
2728
2729 /**
2730 * @opdone
2731 * @opmnemonic ud0f13m3
2732 * @opcode 0x13
2733 * @opcodesub 11 mr/reg
2734 * @oppfx none
2735 * @opunused immediate
2736 * @opcpuid sse
2737 * @optest ->
2738 */
2739 return IEMOP_RAISE_INVALID_OPCODE();
2740}
2741
2742
2743/**
2744 * @opcode 0x13
2745 * @opcodesub !11 mr/reg
2746 * @oppfx 0x66
2747 * @opcpuid sse2
2748 * @opgroup og_sse2_pcksclr_datamove
2749 * @opxcpttype 5
2750 * @optest op1=1 op2=2 -> op1=2
2751 * @optest op1=0 op2=-42 -> op1=-42
2752 */
2753FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2754{
2755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2756 if (IEM_IS_MODRM_MEM_MODE(bRm))
2757 {
2758 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2759 IEM_MC_BEGIN(0, 2);
2760 IEM_MC_LOCAL(uint64_t, uSrc);
2761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2762
2763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2765 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2766 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2767
2768 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2769 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2770
2771 IEM_MC_ADVANCE_RIP();
2772 IEM_MC_END();
2773 return VINF_SUCCESS;
2774 }
2775
2776 /**
2777 * @opdone
2778 * @opmnemonic ud660f13m3
2779 * @opcode 0x13
2780 * @opcodesub 11 mr/reg
2781 * @oppfx 0x66
2782 * @opunused immediate
2783 * @opcpuid sse
2784 * @optest ->
2785 */
2786 return IEMOP_RAISE_INVALID_OPCODE();
2787}
2788
2789
2790/**
2791 * @opmnemonic udf30f13
2792 * @opcode 0x13
2793 * @oppfx 0xf3
2794 * @opunused intel-modrm
2795 * @opcpuid sse
2796 * @optest ->
2797 * @opdone
2798 */
2799
2800/**
2801 * @opmnemonic udf20f13
2802 * @opcode 0x13
2803 * @oppfx 0xf2
2804 * @opunused intel-modrm
2805 * @opcpuid sse
2806 * @optest ->
2807 * @opdone
2808 */
2809
2810/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2811FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2812{
2813 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2814 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2815}
2816
2817
2818/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2819FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2820{
2821 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2822 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2823}
2824
2825
2826/**
2827 * @opdone
2828 * @opmnemonic udf30f14
2829 * @opcode 0x14
2830 * @oppfx 0xf3
2831 * @opunused intel-modrm
2832 * @opcpuid sse
2833 * @optest ->
2834 * @opdone
2835 */
2836
2837/**
2838 * @opmnemonic udf20f14
2839 * @opcode 0x14
2840 * @oppfx 0xf2
2841 * @opunused intel-modrm
2842 * @opcpuid sse
2843 * @optest ->
2844 * @opdone
2845 */
2846
2847/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2848FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2849{
2850 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2851 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2852}
2853
2854
2855/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2856FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2857{
2858 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2859 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2860}
2861
2862
2863/* Opcode 0xf3 0x0f 0x15 - invalid */
2864/* Opcode 0xf2 0x0f 0x15 - invalid */
2865
2866/**
2867 * @opdone
2868 * @opmnemonic udf30f15
2869 * @opcode 0x15
2870 * @oppfx 0xf3
2871 * @opunused intel-modrm
2872 * @opcpuid sse
2873 * @optest ->
2874 * @opdone
2875 */
2876
2877/**
2878 * @opmnemonic udf20f15
2879 * @opcode 0x15
2880 * @oppfx 0xf2
2881 * @opunused intel-modrm
2882 * @opcpuid sse
2883 * @optest ->
2884 * @opdone
2885 */
2886
2887FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2888{
2889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2890 if (IEM_IS_MODRM_REG_MODE(bRm))
2891 {
2892 /**
2893 * @opcode 0x16
2894 * @opcodesub 11 mr/reg
2895 * @oppfx none
2896 * @opcpuid sse
2897 * @opgroup og_sse_simdfp_datamove
2898 * @opxcpttype 5
2899 * @optest op1=1 op2=2 -> op1=2
2900 * @optest op1=0 op2=-42 -> op1=-42
2901 */
2902 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2903
2904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2905 IEM_MC_BEGIN(0, 1);
2906 IEM_MC_LOCAL(uint64_t, uSrc);
2907
2908 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2909 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2910 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2911 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2912
2913 IEM_MC_ADVANCE_RIP();
2914 IEM_MC_END();
2915 }
2916 else
2917 {
2918 /**
2919 * @opdone
2920 * @opcode 0x16
2921 * @opcodesub !11 mr/reg
2922 * @oppfx none
2923 * @opcpuid sse
2924 * @opgroup og_sse_simdfp_datamove
2925 * @opxcpttype 5
2926 * @optest op1=1 op2=2 -> op1=2
2927 * @optest op1=0 op2=-42 -> op1=-42
2928 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2929 */
2930 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2931
2932 IEM_MC_BEGIN(0, 2);
2933 IEM_MC_LOCAL(uint64_t, uSrc);
2934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2935
2936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2938 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2939 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2940
2941 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2942 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2943
2944 IEM_MC_ADVANCE_RIP();
2945 IEM_MC_END();
2946 }
2947 return VINF_SUCCESS;
2948}
2949
2950
2951/**
2952 * @opcode 0x16
2953 * @opcodesub !11 mr/reg
2954 * @oppfx 0x66
2955 * @opcpuid sse2
2956 * @opgroup og_sse2_pcksclr_datamove
2957 * @opxcpttype 5
2958 * @optest op1=1 op2=2 -> op1=2
2959 * @optest op1=0 op2=-42 -> op1=-42
2960 */
2961FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2962{
2963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2964 if (IEM_IS_MODRM_MEM_MODE(bRm))
2965 {
2966 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2967 IEM_MC_BEGIN(0, 2);
2968 IEM_MC_LOCAL(uint64_t, uSrc);
2969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2970
2971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2973 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2974 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2975
2976 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2977 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2978
2979 IEM_MC_ADVANCE_RIP();
2980 IEM_MC_END();
2981 return VINF_SUCCESS;
2982 }
2983
2984 /**
2985 * @opdone
2986 * @opmnemonic ud660f16m3
2987 * @opcode 0x16
2988 * @opcodesub 11 mr/reg
2989 * @oppfx 0x66
2990 * @opunused immediate
2991 * @opcpuid sse
2992 * @optest ->
2993 */
2994 return IEMOP_RAISE_INVALID_OPCODE();
2995}
2996
2997
2998/**
2999 * @opcode 0x16
3000 * @oppfx 0xf3
3001 * @opcpuid sse3
3002 * @opgroup og_sse3_pcksclr_datamove
3003 * @opxcpttype 4
3004 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3005 * op1=0x00000002000000020000000100000001
3006 */
3007FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3008{
3009 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3011 if (IEM_IS_MODRM_REG_MODE(bRm))
3012 {
3013 /*
3014 * Register, register.
3015 */
3016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3017 IEM_MC_BEGIN(2, 0);
3018 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3019 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3020
3021 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3022 IEM_MC_PREPARE_SSE_USAGE();
3023
3024 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3025 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3026 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3027
3028 IEM_MC_ADVANCE_RIP();
3029 IEM_MC_END();
3030 }
3031 else
3032 {
3033 /*
3034 * Register, memory.
3035 */
3036 IEM_MC_BEGIN(2, 2);
3037 IEM_MC_LOCAL(RTUINT128U, uSrc);
3038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3039 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3040 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3041
3042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3044 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3045 IEM_MC_PREPARE_SSE_USAGE();
3046
3047 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3048 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3049 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3050
3051 IEM_MC_ADVANCE_RIP();
3052 IEM_MC_END();
3053 }
3054 return VINF_SUCCESS;
3055}
3056
3057/**
3058 * @opdone
3059 * @opmnemonic udf20f16
3060 * @opcode 0x16
3061 * @oppfx 0xf2
3062 * @opunused intel-modrm
3063 * @opcpuid sse
3064 * @optest ->
3065 * @opdone
3066 */
3067
3068
3069/**
3070 * @opcode 0x17
3071 * @opcodesub !11 mr/reg
3072 * @oppfx none
3073 * @opcpuid sse
3074 * @opgroup og_sse_simdfp_datamove
3075 * @opxcpttype 5
3076 * @optest op1=1 op2=2 -> op1=2
3077 * @optest op1=0 op2=-42 -> op1=-42
3078 */
3079FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3080{
3081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3082 if (IEM_IS_MODRM_MEM_MODE(bRm))
3083 {
3084 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3085
3086 IEM_MC_BEGIN(0, 2);
3087 IEM_MC_LOCAL(uint64_t, uSrc);
3088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3089
3090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3092 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3093 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3094
3095 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3096 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3097
3098 IEM_MC_ADVANCE_RIP();
3099 IEM_MC_END();
3100 return VINF_SUCCESS;
3101 }
3102
3103 /**
3104 * @opdone
3105 * @opmnemonic ud0f17m3
3106 * @opcode 0x17
3107 * @opcodesub 11 mr/reg
3108 * @oppfx none
3109 * @opunused immediate
3110 * @opcpuid sse
3111 * @optest ->
3112 */
3113 return IEMOP_RAISE_INVALID_OPCODE();
3114}
3115
3116
3117/**
3118 * @opcode 0x17
3119 * @opcodesub !11 mr/reg
3120 * @oppfx 0x66
3121 * @opcpuid sse2
3122 * @opgroup og_sse2_pcksclr_datamove
3123 * @opxcpttype 5
3124 * @optest op1=1 op2=2 -> op1=2
3125 * @optest op1=0 op2=-42 -> op1=-42
3126 */
3127FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3128{
3129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3130 if (IEM_IS_MODRM_MEM_MODE(bRm))
3131 {
3132 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3133
3134 IEM_MC_BEGIN(0, 2);
3135 IEM_MC_LOCAL(uint64_t, uSrc);
3136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3137
3138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3140 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3141 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3142
3143 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3144 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3145
3146 IEM_MC_ADVANCE_RIP();
3147 IEM_MC_END();
3148 return VINF_SUCCESS;
3149 }
3150
3151 /**
3152 * @opdone
3153 * @opmnemonic ud660f17m3
3154 * @opcode 0x17
3155 * @opcodesub 11 mr/reg
3156 * @oppfx 0x66
3157 * @opunused immediate
3158 * @opcpuid sse
3159 * @optest ->
3160 */
3161 return IEMOP_RAISE_INVALID_OPCODE();
3162}
3163
3164
3165/**
3166 * @opdone
3167 * @opmnemonic udf30f17
3168 * @opcode 0x17
3169 * @oppfx 0xf3
3170 * @opunused intel-modrm
3171 * @opcpuid sse
3172 * @optest ->
3173 * @opdone
3174 */
3175
3176/**
3177 * @opmnemonic udf20f17
3178 * @opcode 0x17
3179 * @oppfx 0xf2
3180 * @opunused intel-modrm
3181 * @opcpuid sse
3182 * @optest ->
3183 * @opdone
3184 */
3185
3186
3187/** Opcode 0x0f 0x18. */
3188FNIEMOP_DEF(iemOp_prefetch_Grp16)
3189{
3190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3191 if (IEM_IS_MODRM_MEM_MODE(bRm))
3192 {
3193 switch (IEM_GET_MODRM_REG_8(bRm))
3194 {
3195 case 4: /* Aliased to /0 for the time being according to AMD. */
3196 case 5: /* Aliased to /0 for the time being according to AMD. */
3197 case 6: /* Aliased to /0 for the time being according to AMD. */
3198 case 7: /* Aliased to /0 for the time being according to AMD. */
3199 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3200 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3201 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3202 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3204 }
3205
3206 IEM_MC_BEGIN(0, 1);
3207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3210 /* Currently a NOP. */
3211 NOREF(GCPtrEffSrc);
3212 IEM_MC_ADVANCE_RIP();
3213 IEM_MC_END();
3214 return VINF_SUCCESS;
3215 }
3216
3217 return IEMOP_RAISE_INVALID_OPCODE();
3218}
3219
3220
3221/** Opcode 0x0f 0x19..0x1f. */
3222FNIEMOP_DEF(iemOp_nop_Ev)
3223{
3224 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3226 if (IEM_IS_MODRM_REG_MODE(bRm))
3227 {
3228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3229 IEM_MC_BEGIN(0, 0);
3230 IEM_MC_ADVANCE_RIP();
3231 IEM_MC_END();
3232 }
3233 else
3234 {
3235 IEM_MC_BEGIN(0, 1);
3236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3239 /* Currently a NOP. */
3240 NOREF(GCPtrEffSrc);
3241 IEM_MC_ADVANCE_RIP();
3242 IEM_MC_END();
3243 }
3244 return VINF_SUCCESS;
3245}
3246
3247
3248/** Opcode 0x0f 0x20. */
3249FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3250{
3251 /* mod is ignored, as are operand size overrides. */
3252 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3253 IEMOP_HLP_MIN_386();
3254 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3255 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3256 else
3257 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3258
3259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3260 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3261 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3262 {
3263 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3264 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3265 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3266 iCrReg |= 8;
3267 }
3268 switch (iCrReg)
3269 {
3270 case 0: case 2: case 3: case 4: case 8:
3271 break;
3272 default:
3273 return IEMOP_RAISE_INVALID_OPCODE();
3274 }
3275 IEMOP_HLP_DONE_DECODING();
3276
3277 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3278}
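
/*
 * The LOCK-prefix special case above encodes CR8 access for 32-bit guests
 * on CPUs reporting the AMD alternative encoding (fMovCr8In32Bit): the
 * prefix simply adds 8 to the control register index, so F0 0F 20 C0 reads
 * CR8 rather than CR0. Sketch (hypothetical helper):
 */
#if 0 /* illustrative sketch, not built */
static uint8_t movCrdSketchIndex(uint8_t iModRmReg, bool fLockPrefix)
{
    return fLockPrefix ? (uint8_t)(iModRmReg | 8) : iModRmReg; /* 0..7 -> 8..15 */
}
#endif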
3279
3280
3281/** Opcode 0x0f 0x21. */
3282FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3283{
3284 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3285 IEMOP_HLP_MIN_386();
3286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3288 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3289 return IEMOP_RAISE_INVALID_OPCODE();
3290 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3291 IEM_GET_MODRM_RM(pVCpu, bRm),
3292 IEM_GET_MODRM_REG_8(bRm));
3293}
3294
3295
3296/** Opcode 0x0f 0x22. */
3297FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3298{
3299 /* mod is ignored, as are operand size overrides. */
3300 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3301 IEMOP_HLP_MIN_386();
3302 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3303 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3304 else
3305 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3306
3307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3308 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3310 {
3311 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3312 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3313 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3314 iCrReg |= 8;
3315 }
3316 switch (iCrReg)
3317 {
3318 case 0: case 2: case 3: case 4: case 8:
3319 break;
3320 default:
3321 return IEMOP_RAISE_INVALID_OPCODE();
3322 }
3323 IEMOP_HLP_DONE_DECODING();
3324
3325 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3326}
3327
3328
3329/** Opcode 0x0f 0x23. */
3330FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3331{
3332 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3333 IEMOP_HLP_MIN_386();
3334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3337 return IEMOP_RAISE_INVALID_OPCODE();
3338 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3339 IEM_GET_MODRM_REG_8(bRm),
3340 IEM_GET_MODRM_RM(pVCpu, bRm));
3341}
3342
3343
3344/** Opcode 0x0f 0x24. */
3345FNIEMOP_DEF(iemOp_mov_Rd_Td)
3346{
3347 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3348 IEMOP_HLP_MIN_386();
3349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3351 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3352 return IEMOP_RAISE_INVALID_OPCODE();
3353 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3354 IEM_GET_MODRM_RM(pVCpu, bRm),
3355 IEM_GET_MODRM_REG_8(bRm));
3356}
3357
3358
3359/** Opcode 0x0f 0x26. */
3360FNIEMOP_DEF(iemOp_mov_Td_Rd)
3361{
3362 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3363 IEMOP_HLP_MIN_386();
3364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3366 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3367 return IEMOP_RAISE_INVALID_OPCODE();
3368 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3369 IEM_GET_MODRM_REG_8(bRm),
3370 IEM_GET_MODRM_RM(pVCpu, bRm));
3371}
3372
3373
3374/**
3375 * @opcode 0x28
3376 * @oppfx none
3377 * @opcpuid sse
3378 * @opgroup og_sse_simdfp_datamove
3379 * @opxcpttype 1
3380 * @optest op1=1 op2=2 -> op1=2
3381 * @optest op1=0 op2=-42 -> op1=-42
3382 */
3383FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3384{
3385 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3387 if (IEM_IS_MODRM_REG_MODE(bRm))
3388 {
3389 /*
3390 * Register, register.
3391 */
3392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3393 IEM_MC_BEGIN(0, 0);
3394 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3396 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3397 IEM_GET_MODRM_RM(pVCpu, bRm));
3398 IEM_MC_ADVANCE_RIP();
3399 IEM_MC_END();
3400 }
3401 else
3402 {
3403 /*
3404 * Register, memory.
3405 */
3406 IEM_MC_BEGIN(0, 2);
3407 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3409
3410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3412 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3413 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3414
3415 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3416 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3417
3418 IEM_MC_ADVANCE_RIP();
3419 IEM_MC_END();
3420 }
3421 return VINF_SUCCESS;
3422}
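
/*
 * Unlike the movups path earlier, the ..._ALIGN_SSE fetch above enforces
 * the 16-byte alignment rule; a sketch of the check, assuming the usual
 * #GP(0)-on-misalignment behaviour (helper name is illustrative):
 */
#if 0 /* illustrative sketch, not built */
static bool movapsSketchOperandOk(uint64_t GCPtrMem)
{
    return (GCPtrMem & 15) == 0; /* false -> #GP(0) instead of the access */
}
#endif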
3423
3424/**
3425 * @opcode 0x28
3426 * @oppfx 66
3427 * @opcpuid sse2
3428 * @opgroup og_sse2_pcksclr_datamove
3429 * @opxcpttype 1
3430 * @optest op1=1 op2=2 -> op1=2
3431 * @optest op1=0 op2=-42 -> op1=-42
3432 */
3433FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3434{
3435 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3437 if (IEM_IS_MODRM_REG_MODE(bRm))
3438 {
3439 /*
3440 * Register, register.
3441 */
3442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3443 IEM_MC_BEGIN(0, 0);
3444 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3446 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3447 IEM_GET_MODRM_RM(pVCpu, bRm));
3448 IEM_MC_ADVANCE_RIP();
3449 IEM_MC_END();
3450 }
3451 else
3452 {
3453 /*
3454 * Register, memory.
3455 */
3456 IEM_MC_BEGIN(0, 2);
3457 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3459
3460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3462 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3463 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3464
3465 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3466 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3467
3468 IEM_MC_ADVANCE_RIP();
3469 IEM_MC_END();
3470 }
3471 return VINF_SUCCESS;
3472}
3473
3474/* Opcode 0xf3 0x0f 0x28 - invalid */
3475/* Opcode 0xf2 0x0f 0x28 - invalid */
3476
3477/**
3478 * @opcode 0x29
3479 * @oppfx none
3480 * @opcpuid sse
3481 * @opgroup og_sse_simdfp_datamove
3482 * @opxcpttype 1
3483 * @optest op1=1 op2=2 -> op1=2
3484 * @optest op1=0 op2=-42 -> op1=-42
3485 */
3486FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3487{
3488 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3490 if (IEM_IS_MODRM_REG_MODE(bRm))
3491 {
3492 /*
3493 * Register, register.
3494 */
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_BEGIN(0, 0);
3497 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3499 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3500 IEM_GET_MODRM_REG(pVCpu, bRm));
3501 IEM_MC_ADVANCE_RIP();
3502 IEM_MC_END();
3503 }
3504 else
3505 {
3506 /*
3507 * Memory, register.
3508 */
3509 IEM_MC_BEGIN(0, 2);
3510 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3512
3513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3515 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3517
3518 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3519 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3520
3521 IEM_MC_ADVANCE_RIP();
3522 IEM_MC_END();
3523 }
3524 return VINF_SUCCESS;
3525}
3526
3527/**
3528 * @opcode 0x29
3529 * @oppfx 66
3530 * @opcpuid sse2
3531 * @opgroup og_sse2_pcksclr_datamove
3532 * @opxcpttype 1
3533 * @optest op1=1 op2=2 -> op1=2
3534 * @optest op1=0 op2=-42 -> op1=-42
3535 */
3536FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3537{
3538 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3540 if (IEM_IS_MODRM_REG_MODE(bRm))
3541 {
3542 /*
3543 * Register, register.
3544 */
3545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3546 IEM_MC_BEGIN(0, 0);
3547 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3548 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3549 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3550 IEM_GET_MODRM_REG(pVCpu, bRm));
3551 IEM_MC_ADVANCE_RIP();
3552 IEM_MC_END();
3553 }
3554 else
3555 {
3556 /*
3557 * Memory, register.
3558 */
3559 IEM_MC_BEGIN(0, 2);
3560 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3562
3563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3565 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3566 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3567
3568 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3569 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3570
3571 IEM_MC_ADVANCE_RIP();
3572 IEM_MC_END();
3573 }
3574 return VINF_SUCCESS;
3575}
3576
3577/* Opcode 0xf3 0x0f 0x29 - invalid */
3578/* Opcode 0xf2 0x0f 0x29 - invalid */
3579
3580
3581/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3582FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
3583/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3584FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
3585
3586
3587/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3588FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3589{
3590 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3591
3592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3593 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3594 {
3595 if (IEM_IS_MODRM_REG_MODE(bRm))
3596 {
3597 /* XMM, greg64 */
3598 IEM_MC_BEGIN(3, 4);
3599 IEM_MC_LOCAL(uint32_t, fMxcsr);
3600 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3601 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3602 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3603 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3604
3605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3606 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3607 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3608
3609 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3610 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3611 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3612 IEM_MC_IF_MXCSR_XCPT_PENDING()
3613 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3614 IEM_MC_ELSE()
3615 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3616 IEM_MC_ENDIF();
3617
3618 IEM_MC_ADVANCE_RIP();
3619 IEM_MC_END();
3620 }
3621 else
3622 {
3623 /* XMM, [mem64] */
3624 IEM_MC_BEGIN(3, 4);
3625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3626 IEM_MC_LOCAL(uint32_t, fMxcsr);
3627 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3628 IEM_MC_LOCAL(int64_t, i64Src);
3629 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3630 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3631 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3632
3633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3635 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3636 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3637
3638 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3639 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3640 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3641 IEM_MC_IF_MXCSR_XCPT_PENDING()
3642 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3643 IEM_MC_ELSE()
3644 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3645 IEM_MC_ENDIF();
3646
3647 IEM_MC_ADVANCE_RIP();
3648 IEM_MC_END();
3649 }
3650 }
3651 else
3652 {
3653 if (IEM_IS_MODRM_REG_MODE(bRm))
3654 {
3655 /* XMM, greg32 */
3656 IEM_MC_BEGIN(3, 4);
3657 IEM_MC_LOCAL(uint32_t, fMxcsr);
3658 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3659 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3660 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3661 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3662
3663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3665 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3666
3667 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3668 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3669 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3670 IEM_MC_IF_MXCSR_XCPT_PENDING()
3671 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3672 IEM_MC_ELSE()
3673 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3674 IEM_MC_ENDIF();
3675
3676 IEM_MC_ADVANCE_RIP();
3677 IEM_MC_END();
3678 }
3679 else
3680 {
3681 /* XMM, [mem32] */
3682 IEM_MC_BEGIN(3, 4);
3683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3684 IEM_MC_LOCAL(uint32_t, fMxcsr);
3685 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3686 IEM_MC_LOCAL(int32_t, i32Src);
3687 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3688 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3689 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3690
3691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3693 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3694 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3695
3696 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3697 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3698 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3699 IEM_MC_IF_MXCSR_XCPT_PENDING()
3700 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3701 IEM_MC_ELSE()
3702 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3703 IEM_MC_ENDIF();
3704
3705 IEM_MC_ADVANCE_RIP();
3706 IEM_MC_END();
3707 }
3708 }
3709 return VINF_SUCCESS;
3710}
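
/*
 * The MXCSR pattern above recurs in all the SSE conversion paths: run the
 * worker, merge the returned status flags, and only commit the result when
 * no unmasked exception is pending. A sketch of that pending check, assuming
 * the standard MXCSR layout (status in bits 0..5, masks in bits 7..12):
 */
#if 0 /* illustrative sketch, not built */
static bool cvtSketchCommitOk(uint32_t fMxcsr)
{
    uint32_t const fXcpts = fMxcsr & UINT32_C(0x3f);        /* IE..PE status */
    uint32_t const fMasks = (fMxcsr >> 7) & UINT32_C(0x3f); /* IM..PM masks  */
    return (fXcpts & ~fMasks) == 0; /* any unmasked flag -> raise #XM/#UD */
}
#endif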
3711
3712
3713/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3714FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3715{
3716 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3717
3718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3719 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3720 {
3721 if (IEM_IS_MODRM_REG_MODE(bRm))
3722 {
3723 /* XMM, greg64 */
3724 IEM_MC_BEGIN(3, 4);
3725 IEM_MC_LOCAL(uint32_t, fMxcsr);
3726 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3727 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3728 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3729 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3730
3731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3732 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3733 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
3734
3735 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3736 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3737 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3738 IEM_MC_IF_MXCSR_XCPT_PENDING()
3739 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3740 IEM_MC_ELSE()
3741 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3742 IEM_MC_ENDIF();
3743
3744 IEM_MC_ADVANCE_RIP();
3745 IEM_MC_END();
3746 }
3747 else
3748 {
3749 /* XMM, [mem64] */
3750 IEM_MC_BEGIN(3, 4);
3751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3752 IEM_MC_LOCAL(uint32_t, fMxcsr);
3753 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3754 IEM_MC_LOCAL(int64_t, i64Src);
3755 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3756 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3757 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3758
3759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3761 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3762 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
3763
3764 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3765 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3766 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3767 IEM_MC_IF_MXCSR_XCPT_PENDING()
3768 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3769 IEM_MC_ELSE()
3770 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3771 IEM_MC_ENDIF();
3772
3773 IEM_MC_ADVANCE_RIP();
3774 IEM_MC_END();
3775 }
3776 }
3777 else
3778 {
3779 if (IEM_IS_MODRM_REG_MODE(bRm))
3780 {
3781 /* XMM, greg32 */
3782 IEM_MC_BEGIN(3, 4);
3783 IEM_MC_LOCAL(uint32_t, fMxcsr);
3784 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3785 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3786 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3787 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3788
3789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3790 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3791 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
3792
3793 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3794 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3795 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3796 IEM_MC_IF_MXCSR_XCPT_PENDING()
3797 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3798 IEM_MC_ELSE()
3799 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3800 IEM_MC_ENDIF();
3801
3802 IEM_MC_ADVANCE_RIP();
3803 IEM_MC_END();
3804 }
3805 else
3806 {
3807 /* XMM, [mem32] */
3808 IEM_MC_BEGIN(3, 4);
3809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3810 IEM_MC_LOCAL(uint32_t, fMxcsr);
3811 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3812 IEM_MC_LOCAL(int32_t, i32Src);
3813 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3814 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3815 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3816
3817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3819 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3820 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
3821
3822 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3823 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3824 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3825 IEM_MC_IF_MXCSR_XCPT_PENDING()
3826 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3827 IEM_MC_ELSE()
3828 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3829 IEM_MC_ENDIF();
3830
3831 IEM_MC_ADVANCE_RIP();
3832 IEM_MC_END();
3833 }
3834 }
3835 return VINF_SUCCESS;
3836}
3837
3838
3839/**
3840 * @opcode 0x2b
3841 * @opcodesub !11 mr/reg
3842 * @oppfx none
3843 * @opcpuid sse
3844 * @opgroup og_sse1_cachect
3845 * @opxcpttype 1
3846 * @optest op1=1 op2=2 -> op1=2
3847 * @optest op1=0 op2=-42 -> op1=-42
3848 */
3849FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3850{
3851 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3853 if (IEM_IS_MODRM_MEM_MODE(bRm))
3854 {
3855 /*
3856 * memory, register.
3857 */
3858 IEM_MC_BEGIN(0, 2);
3859 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3861
3862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3866
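    /* Note: the non-temporal hint isn't modelled; as far as the emulation is
       concerned this is an ordinary aligned 128-bit store. */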
3867 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3868 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3869
3870 IEM_MC_ADVANCE_RIP();
3871 IEM_MC_END();
3872 }
3873 /* The register, register encoding is invalid. */
3874 else
3875 return IEMOP_RAISE_INVALID_OPCODE();
3876 return VINF_SUCCESS;
3877}
3878
3879/**
3880 * @opcode 0x2b
3881 * @opcodesub !11 mr/reg
3882 * @oppfx 0x66
3883 * @opcpuid sse2
3884 * @opgroup og_sse2_cachect
3885 * @opxcpttype 1
3886 * @optest op1=1 op2=2 -> op1=2
3887 * @optest op1=0 op2=-42 -> op1=-42
3888 */
3889FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3890{
3891 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3893 if (IEM_IS_MODRM_MEM_MODE(bRm))
3894 {
3895 /*
3896 * memory, register.
3897 */
3898 IEM_MC_BEGIN(0, 2);
3899 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3901
3902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3904 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3905 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3906
3907 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3908 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3909
3910 IEM_MC_ADVANCE_RIP();
3911 IEM_MC_END();
3912 }
3913 /* The register, register encoding is invalid. */
3914 else
3915 return IEMOP_RAISE_INVALID_OPCODE();
3916 return VINF_SUCCESS;
3917}
3918/* Opcode 0xf3 0x0f 0x2b - invalid */
3919/* Opcode 0xf2 0x0f 0x2b - invalid */
3920
3921
3922/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3923FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
3924/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3925FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
3926
3927
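/* Note: the "tt" variants (cvttss2si, cvttsd2si, etc.) always truncate, i.e.
   round toward zero, while the cvtss2si/cvtsd2si forms further down honour
   the rounding mode in MXCSR.RC. */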
3928/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3929FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
3930{
3931 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3932
3933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3934 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3935 {
3936 if (IEM_IS_MODRM_REG_MODE(bRm))
3937 {
3938 /* greg64, XMM */
3939 IEM_MC_BEGIN(3, 4);
3940 IEM_MC_LOCAL(uint32_t, fMxcsr);
3941 IEM_MC_LOCAL(int64_t, i64Dst);
3942 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3943 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
3944 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
3945
3946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3947 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3948 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
3949
3950 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3951 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
3952 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3953 IEM_MC_IF_MXCSR_XCPT_PENDING()
3954 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3955 IEM_MC_ELSE()
3956 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
3957 IEM_MC_ENDIF();
3958
3959 IEM_MC_ADVANCE_RIP();
3960 IEM_MC_END();
3961 }
3962 else
3963 {
3964 /* greg64, [mem64] */
3965 IEM_MC_BEGIN(3, 4);
3966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3967 IEM_MC_LOCAL(uint32_t, fMxcsr);
3968 IEM_MC_LOCAL(int64_t, i64Dst);
3969 IEM_MC_LOCAL(uint32_t, u32Src);
3970 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3971 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
3972 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
3973
3974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3977 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
3978
3979 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3980 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
3981 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3982 IEM_MC_IF_MXCSR_XCPT_PENDING()
3983 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3984 IEM_MC_ELSE()
3985 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
3986 IEM_MC_ENDIF();
3987
3988 IEM_MC_ADVANCE_RIP();
3989 IEM_MC_END();
3990 }
3991 }
3992 else
3993 {
3994 if (IEM_IS_MODRM_REG_MODE(bRm))
3995 {
3996 /* greg, XMM */
3997 IEM_MC_BEGIN(3, 4);
3998 IEM_MC_LOCAL(uint32_t, fMxcsr);
3999 IEM_MC_LOCAL(int32_t, i32Dst);
4000 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4001 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4002 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4003
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4006 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4007
4008 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4009 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4010 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4011 IEM_MC_IF_MXCSR_XCPT_PENDING()
4012 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4013 IEM_MC_ELSE()
4014 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4015 IEM_MC_ENDIF();
4016
4017 IEM_MC_ADVANCE_RIP();
4018 IEM_MC_END();
4019 }
4020 else
4021 {
4022 /* greg, [mem] */
4023 IEM_MC_BEGIN(3, 4);
4024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4025 IEM_MC_LOCAL(uint32_t, fMxcsr);
4026 IEM_MC_LOCAL(int32_t, i32Dst);
4027 IEM_MC_LOCAL(uint32_t, u32Src);
4028 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4029 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4030 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4031
4032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4034 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4035 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4036
4037 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4038 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4039 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4040 IEM_MC_IF_MXCSR_XCPT_PENDING()
4041 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4042 IEM_MC_ELSE()
4043 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4044 IEM_MC_ENDIF();
4045
4046 IEM_MC_ADVANCE_RIP();
4047 IEM_MC_END();
4048 }
4049 }
4050 return VINF_SUCCESS;
4051}
4052
4053
4054/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4055FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4056{
4057 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4058
4059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4060 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4061 {
4062 if (IEM_IS_MODRM_REG_MODE(bRm))
4063 {
4064 /* greg64, XMM */
4065 IEM_MC_BEGIN(3, 4);
4066 IEM_MC_LOCAL(uint32_t, fMxcsr);
4067 IEM_MC_LOCAL(int64_t, i64Dst);
4068 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4069 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4070 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4071
4072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4073 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4074 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4075
4076 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4077 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4078 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4079 IEM_MC_IF_MXCSR_XCPT_PENDING()
4080 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4081 IEM_MC_ELSE()
4082 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4083 IEM_MC_ENDIF();
4084
4085 IEM_MC_ADVANCE_RIP();
4086 IEM_MC_END();
4087 }
4088 else
4089 {
4090 /* greg64, [mem64] */
4091 IEM_MC_BEGIN(3, 4);
4092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4093 IEM_MC_LOCAL(uint32_t, fMxcsr);
4094 IEM_MC_LOCAL(int64_t, i64Dst);
4095 IEM_MC_LOCAL(uint64_t, u64Src);
4096 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4097 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4098 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4099
4100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4102 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4103 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4104
4105 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4106 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4107 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4108 IEM_MC_IF_MXCSR_XCPT_PENDING()
4109 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4110 IEM_MC_ELSE()
4111 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4112 IEM_MC_ENDIF();
4113
4114 IEM_MC_ADVANCE_RIP();
4115 IEM_MC_END();
4116 }
4117 }
4118 else
4119 {
4120 if (IEM_IS_MODRM_REG_MODE(bRm))
4121 {
4122 /* greg, XMM */
4123 IEM_MC_BEGIN(3, 4);
4124 IEM_MC_LOCAL(uint32_t, fMxcsr);
4125 IEM_MC_LOCAL(int32_t, i32Dst);
4126 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4127 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4128 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4129
4130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4131 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4132 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4133
4134 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4135 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4136 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4137 IEM_MC_IF_MXCSR_XCPT_PENDING()
4138 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4139 IEM_MC_ELSE()
4140 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4141 IEM_MC_ENDIF();
4142
4143 IEM_MC_ADVANCE_RIP();
4144 IEM_MC_END();
4145 }
4146 else
4147 {
4148 /* greg, [mem] */
4149 IEM_MC_BEGIN(3, 4);
4150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4151 IEM_MC_LOCAL(uint32_t, fMxcsr);
4152 IEM_MC_LOCAL(int32_t, i32Dst);
4153 IEM_MC_LOCAL(uint64_t, u64Src);
4154 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4155 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4156 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4157
4158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4160 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4161 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4162
4163 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4164 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4165 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4166 IEM_MC_IF_MXCSR_XCPT_PENDING()
4167 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4168 IEM_MC_ELSE()
4169 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4170 IEM_MC_ENDIF();
4171
4172 IEM_MC_ADVANCE_RIP();
4173 IEM_MC_END();
4174 }
4175 }
4176 return VINF_SUCCESS;
4177}
4178
4179
4180/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4181FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
4182/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4183FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
4184
4185
4186/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4187FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4188{
4189 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4190
4191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4192 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4193 {
4194 if (IEM_IS_MODRM_REG_MODE(bRm))
4195 {
4196 /* greg64, XMM */
4197 IEM_MC_BEGIN(3, 4);
4198 IEM_MC_LOCAL(uint32_t, fMxcsr);
4199 IEM_MC_LOCAL(int64_t, i64Dst);
4200 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4201 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4202 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4203
4204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4205 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4206 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4207
4208 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4209 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4210 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4211 IEM_MC_IF_MXCSR_XCPT_PENDING()
4212 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4213 IEM_MC_ELSE()
4214 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4215 IEM_MC_ENDIF();
4216
4217 IEM_MC_ADVANCE_RIP();
4218 IEM_MC_END();
4219 }
4220 else
4221 {
4222 /* greg64, [mem64] */
4223 IEM_MC_BEGIN(3, 4);
4224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4225 IEM_MC_LOCAL(uint32_t, fMxcsr);
4226 IEM_MC_LOCAL(int64_t, i64Dst);
4227 IEM_MC_LOCAL(uint32_t, u32Src);
4228 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4229 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4230 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4231
4232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4234 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4235 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4236
4237 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4238 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4239 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4240 IEM_MC_IF_MXCSR_XCPT_PENDING()
4241 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4242 IEM_MC_ELSE()
4243 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4244 IEM_MC_ENDIF();
4245
4246 IEM_MC_ADVANCE_RIP();
4247 IEM_MC_END();
4248 }
4249 }
4250 else
4251 {
4252 if (IEM_IS_MODRM_REG_MODE(bRm))
4253 {
4254 /* greg, XMM */
4255 IEM_MC_BEGIN(3, 4);
4256 IEM_MC_LOCAL(uint32_t, fMxcsr);
4257 IEM_MC_LOCAL(int32_t, i32Dst);
4258 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4259 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4260 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4261
4262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4263 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4264 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4265
4266 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4267 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4268 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4269 IEM_MC_IF_MXCSR_XCPT_PENDING()
4270 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4271 IEM_MC_ELSE()
4272 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4273 IEM_MC_ENDIF();
4274
4275 IEM_MC_ADVANCE_RIP();
4276 IEM_MC_END();
4277 }
4278 else
4279 {
4280 /* greg, [mem] */
4281 IEM_MC_BEGIN(3, 4);
4282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4283 IEM_MC_LOCAL(uint32_t, fMxcsr);
4284 IEM_MC_LOCAL(int32_t, i32Dst);
4285 IEM_MC_LOCAL(uint32_t, u32Src);
4286 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4287 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4288 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4289
4290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4292 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4293 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4294
4295 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4296 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4297 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4298 IEM_MC_IF_MXCSR_XCPT_PENDING()
4299 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4300 IEM_MC_ELSE()
4301 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4302 IEM_MC_ENDIF();
4303
4304 IEM_MC_ADVANCE_RIP();
4305 IEM_MC_END();
4306 }
4307 }
4308 return VINF_SUCCESS;
4309}
4310
4311
4312/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4313FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4314{
4315 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4316
4317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4318 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4319 {
4320 if (IEM_IS_MODRM_REG_MODE(bRm))
4321 {
4322 /* greg64, XMM */
4323 IEM_MC_BEGIN(3, 4);
4324 IEM_MC_LOCAL(uint32_t, fMxcsr);
4325 IEM_MC_LOCAL(int64_t, i64Dst);
4326 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4327 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4328 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4329
4330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4331 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4332 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4333
4334 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4335 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4336 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4337 IEM_MC_IF_MXCSR_XCPT_PENDING()
4338 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4339 IEM_MC_ELSE()
4340 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4341 IEM_MC_ENDIF();
4342
4343 IEM_MC_ADVANCE_RIP();
4344 IEM_MC_END();
4345 }
4346 else
4347 {
4348 /* greg64, [mem64] */
4349 IEM_MC_BEGIN(3, 4);
4350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4351 IEM_MC_LOCAL(uint32_t, fMxcsr);
4352 IEM_MC_LOCAL(int64_t, i64Dst);
4353 IEM_MC_LOCAL(uint64_t, u64Src);
4354 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4355 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4356 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4357
4358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4361 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4362
4363 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4364 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4365 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4366 IEM_MC_IF_MXCSR_XCPT_PENDING()
4367 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4368 IEM_MC_ELSE()
4369 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4370 IEM_MC_ENDIF();
4371
4372 IEM_MC_ADVANCE_RIP();
4373 IEM_MC_END();
4374 }
4375 }
4376 else
4377 {
4378 if (IEM_IS_MODRM_REG_MODE(bRm))
4379 {
4380 /* greg, XMM */
4381 IEM_MC_BEGIN(3, 4);
4382 IEM_MC_LOCAL(uint32_t, fMxcsr);
4383 IEM_MC_LOCAL(int32_t, i32Dst);
4384 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4385 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4386 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4387
4388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4389 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4390 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4391
4392 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4393 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4394 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4395 IEM_MC_IF_MXCSR_XCPT_PENDING()
4396 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4397 IEM_MC_ELSE()
4398 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4399 IEM_MC_ENDIF();
4400
4401 IEM_MC_ADVANCE_RIP();
4402 IEM_MC_END();
4403 }
4404 else
4405 {
4406 /* greg, [mem] */
4407 IEM_MC_BEGIN(3, 4);
4408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4409 IEM_MC_LOCAL(uint32_t, fMxcsr);
4410 IEM_MC_LOCAL(int32_t, i32Dst);
4411 IEM_MC_LOCAL(uint64_t, u64Src);
4412 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4413 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4414 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4415
4416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4418 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4419 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() already calls this, but the tstIEMCheckMc testcase depends on it. */
4420
4421 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4422 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4423 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4424 IEM_MC_IF_MXCSR_XCPT_PENDING()
4425 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4426 IEM_MC_ELSE()
4427 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4428 IEM_MC_ENDIF();
4429
4430 IEM_MC_ADVANCE_RIP();
4431 IEM_MC_END();
4432 }
4433 }
4434 return VINF_SUCCESS;
4435}
4436
4437
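/* Note: ucomiss/ucomisd signal #IA only for SNaN operands, whereas the
   comiss/comisd variants below also signal it for QNaNs; all four set ZF,
   PF and CF from the compare and clear OF, SF and AF. */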
4438/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4439FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4440{
4441 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4443 if (IEM_IS_MODRM_REG_MODE(bRm))
4444 {
4445 /*
4446 * Register, register.
4447 */
4448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4449 IEM_MC_BEGIN(4, 1);
4450 IEM_MC_LOCAL(uint32_t, fEFlags);
4451 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4452 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4453 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4454 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4455 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4456 IEM_MC_PREPARE_SSE_USAGE();
4457 IEM_MC_FETCH_EFLAGS(fEFlags);
4458 IEM_MC_REF_MXCSR(pfMxcsr);
4459 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4460 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4461 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4462 IEM_MC_IF_MXCSR_XCPT_PENDING()
4463 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4464 IEM_MC_ELSE()
4465 IEM_MC_COMMIT_EFLAGS(fEFlags);
4466 IEM_MC_ENDIF();
4467
4468 IEM_MC_ADVANCE_RIP();
4469 IEM_MC_END();
4470 }
4471 else
4472 {
4473 /*
4474 * Register, memory.
4475 */
4476 IEM_MC_BEGIN(4, 3);
4477 IEM_MC_LOCAL(uint32_t, fEFlags);
4478 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4479 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4480 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4481 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4482 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4484
4485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4488 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4489
4490 IEM_MC_PREPARE_SSE_USAGE();
4491 IEM_MC_REF_MXCSR(pfMxcsr);
4492 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4493 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4494 IEM_MC_IF_MXCSR_XCPT_PENDING()
4495 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4496 IEM_MC_ELSE()
4497 IEM_MC_COMMIT_EFLAGS(fEFlags);
4498 IEM_MC_ENDIF();
4499
4500 IEM_MC_ADVANCE_RIP();
4501 IEM_MC_END();
4502 }
4503 return VINF_SUCCESS;
4504}
4505
4506
4507/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4508FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4509{
4510 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4512 if (IEM_IS_MODRM_REG_MODE(bRm))
4513 {
4514 /*
4515 * Register, register.
4516 */
4517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4518 IEM_MC_BEGIN(4, 1);
4519 IEM_MC_LOCAL(uint32_t, fEFlags);
4520 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4521 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4522 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4523 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4525 IEM_MC_PREPARE_SSE_USAGE();
4526 IEM_MC_FETCH_EFLAGS(fEFlags);
4527 IEM_MC_REF_MXCSR(pfMxcsr);
4528 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4529 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4530 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4531 IEM_MC_IF_MXCSR_XCPT_PENDING()
4532 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4533 IEM_MC_ELSE()
4534 IEM_MC_COMMIT_EFLAGS(fEFlags);
4535 IEM_MC_ENDIF();
4536
4537 IEM_MC_ADVANCE_RIP();
4538 IEM_MC_END();
4539 }
4540 else
4541 {
4542 /*
4543 * Register, memory.
4544 */
4545 IEM_MC_BEGIN(4, 3);
4546 IEM_MC_LOCAL(uint32_t, fEFlags);
4547 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4548 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4549 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4550 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4551 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4553
4554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4556 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4557 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4558
4559 IEM_MC_PREPARE_SSE_USAGE();
4560 IEM_MC_REF_MXCSR(pfMxcsr);
4561 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4562 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4563 IEM_MC_IF_MXCSR_XCPT_PENDING()
4564 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4565 IEM_MC_ELSE()
4566 IEM_MC_COMMIT_EFLAGS(fEFlags);
4567 IEM_MC_ENDIF();
4568
4569 IEM_MC_ADVANCE_RIP();
4570 IEM_MC_END();
4571 }
4572 return VINF_SUCCESS;
4573}
4574
4575
4576/* Opcode 0xf3 0x0f 0x2e - invalid */
4577/* Opcode 0xf2 0x0f 0x2e - invalid */
4578
4579
4580/** Opcode 0x0f 0x2f - comiss Vss, Wss */
4581FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4582{
4583 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4585 if (IEM_IS_MODRM_REG_MODE(bRm))
4586 {
4587 /*
4588 * Register, register.
4589 */
4590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4591 IEM_MC_BEGIN(4, 1);
4592 IEM_MC_LOCAL(uint32_t, fEFlags);
4593 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4594 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4595 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4596 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4597 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4598 IEM_MC_PREPARE_SSE_USAGE();
4599 IEM_MC_FETCH_EFLAGS(fEFlags);
4600 IEM_MC_REF_MXCSR(pfMxcsr);
4601 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4602 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4603 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4604 IEM_MC_IF_MXCSR_XCPT_PENDING()
4605 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4606 IEM_MC_ELSE()
4607 IEM_MC_COMMIT_EFLAGS(fEFlags);
4608 IEM_MC_ENDIF();
4609
4610 IEM_MC_ADVANCE_RIP();
4611 IEM_MC_END();
4612 }
4613 else
4614 {
4615 /*
4616 * Register, memory.
4617 */
4618 IEM_MC_BEGIN(4, 3);
4619 IEM_MC_LOCAL(uint32_t, fEFlags);
4620 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4621 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4622 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4623 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4624 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4626
4627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4629 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4630 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4631
4632 IEM_MC_PREPARE_SSE_USAGE();
4633 IEM_MC_REF_MXCSR(pfMxcsr);
4634 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4635 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4636 IEM_MC_IF_MXCSR_XCPT_PENDING()
4637 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4638 IEM_MC_ELSE()
4639 IEM_MC_COMMIT_EFLAGS(fEFlags);
4640 IEM_MC_ENDIF();
4641
4642 IEM_MC_ADVANCE_RIP();
4643 IEM_MC_END();
4644 }
4645 return VINF_SUCCESS;
4646}
4647
4648
4649/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
4650FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4651{
4652 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4654 if (IEM_IS_MODRM_REG_MODE(bRm))
4655 {
4656 /*
4657 * Register, register.
4658 */
4659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4660 IEM_MC_BEGIN(4, 1);
4661 IEM_MC_LOCAL(uint32_t, fEFlags);
4662 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4663 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4664 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4665 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4666 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4667 IEM_MC_PREPARE_SSE_USAGE();
4668 IEM_MC_FETCH_EFLAGS(fEFlags);
4669 IEM_MC_REF_MXCSR(pfMxcsr);
4670 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4671 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4672 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4673 IEM_MC_IF_MXCSR_XCPT_PENDING()
4674 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4675 IEM_MC_ELSE()
4676 IEM_MC_COMMIT_EFLAGS(fEFlags);
4677 IEM_MC_ENDIF();
4678
4679 IEM_MC_ADVANCE_RIP();
4680 IEM_MC_END();
4681 }
4682 else
4683 {
4684 /*
4685 * Register, memory.
4686 */
4687 IEM_MC_BEGIN(4, 3);
4688 IEM_MC_LOCAL(uint32_t, fEFlags);
4689 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4690 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4691 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4692 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4693 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4695
4696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4699 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4700
4701 IEM_MC_PREPARE_SSE_USAGE();
4702 IEM_MC_REF_MXCSR(pfMxcsr);
4703 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4704 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4705 IEM_MC_IF_MXCSR_XCPT_PENDING()
4706 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4707 IEM_MC_ELSE()
4708 IEM_MC_COMMIT_EFLAGS(fEFlags);
4709 IEM_MC_ENDIF();
4710
4711 IEM_MC_ADVANCE_RIP();
4712 IEM_MC_END();
4713 }
4714 return VINF_SUCCESS;
4715}
4716
4717
4718/* Opcode 0xf3 0x0f 0x2f - invalid */
4719/* Opcode 0xf2 0x0f 0x2f - invalid */
4720
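/* The MSR, TSC, PMC and sysenter/sysexit instructions below are too involved
   for microcode blocks and are simply deferred to their C implementations. */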
4721/** Opcode 0x0f 0x30. */
4722FNIEMOP_DEF(iemOp_wrmsr)
4723{
4724 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4726 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
4727}
4728
4729
4730/** Opcode 0x0f 0x31. */
4731FNIEMOP_DEF(iemOp_rdtsc)
4732{
4733 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4735 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
4736}
4737
4738
4739/** Opcode 0x0f 0x32. */
4740FNIEMOP_DEF(iemOp_rdmsr)
4741{
4742 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4744 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
4745}
4746
4747
4748/** Opcode 0x0f 0x33. */
4749FNIEMOP_DEF(iemOp_rdpmc)
4750{
4751 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4753 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
4754}
4755
4756
4757/** Opcode 0x0f 0x34. */
4758FNIEMOP_DEF(iemOp_sysenter)
4759{
4760 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4762 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
4763}
4764
4765/** Opcode 0x0f 0x35. */
4766FNIEMOP_DEF(iemOp_sysexit)
4767{
4768 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4770 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4771}
4772
4773/** Opcode 0x0f 0x37. */
4774FNIEMOP_STUB(iemOp_getsec);
4775
4776
4777/** Opcode 0x0f 0x38. */
4778FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4779{
4780#ifdef IEM_WITH_THREE_0F_38
4781 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
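    /* The three-byte tables store four entries per opcode byte, selected by
       the last prefix: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2; ditto for the
       0x3a table below. */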
4782 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4783#else
4784 IEMOP_BITCH_ABOUT_STUB();
4785 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4786#endif
4787}
4788
4789
4790/** Opcode 0x0f 0x3a. */
4791FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4792{
4793#ifdef IEM_WITH_THREE_0F_3A
4794 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4795 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4796#else
4797 IEMOP_BITCH_ABOUT_STUB();
4798 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4799#endif
4800}
4801
4802
4803/**
4804 * Implements a conditional move.
4805 *
4806 * Wish there was an obvious way to do this where we could share and reduce
4807 * code bloat.
4808 *
4809 * @param a_Cnd The conditional "microcode" operation.
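 *
 * @remarks In 64-bit mode the 32-bit variants clear bits 63:32 of the
 *          destination register even when the condition is false, which is
 *          what the IEM_MC_CLEAR_HIGH_GREG_U64 in the IEM_MC_ELSE() branches
 *          takes care of.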
4810 */
4811#define CMOV_X(a_Cnd) \
4812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4813 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4814 { \
4815 switch (pVCpu->iem.s.enmEffOpSize) \
4816 { \
4817 case IEMMODE_16BIT: \
4818 IEM_MC_BEGIN(0, 1); \
4819 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4820 a_Cnd { \
4821 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4822 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4823 } IEM_MC_ENDIF(); \
4824 IEM_MC_ADVANCE_RIP(); \
4825 IEM_MC_END(); \
4826 return VINF_SUCCESS; \
4827 \
4828 case IEMMODE_32BIT: \
4829 IEM_MC_BEGIN(0, 1); \
4830 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4831 a_Cnd { \
4832 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4833 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4834 } IEM_MC_ELSE() { \
4835 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4836 } IEM_MC_ENDIF(); \
4837 IEM_MC_ADVANCE_RIP(); \
4838 IEM_MC_END(); \
4839 return VINF_SUCCESS; \
4840 \
4841 case IEMMODE_64BIT: \
4842 IEM_MC_BEGIN(0, 1); \
4843 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4844 a_Cnd { \
4845 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4846 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4847 } IEM_MC_ENDIF(); \
4848 IEM_MC_ADVANCE_RIP(); \
4849 IEM_MC_END(); \
4850 return VINF_SUCCESS; \
4851 \
4852 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4853 } \
4854 } \
4855 else \
4856 { \
4857 switch (pVCpu->iem.s.enmEffOpSize) \
4858 { \
4859 case IEMMODE_16BIT: \
4860 IEM_MC_BEGIN(0, 2); \
4861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4862 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4864 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4865 a_Cnd { \
4866 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4867 } IEM_MC_ENDIF(); \
4868 IEM_MC_ADVANCE_RIP(); \
4869 IEM_MC_END(); \
4870 return VINF_SUCCESS; \
4871 \
4872 case IEMMODE_32BIT: \
4873 IEM_MC_BEGIN(0, 2); \
4874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4875 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4877 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4878 a_Cnd { \
4879 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4880 } IEM_MC_ELSE() { \
4881 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4882 } IEM_MC_ENDIF(); \
4883 IEM_MC_ADVANCE_RIP(); \
4884 IEM_MC_END(); \
4885 return VINF_SUCCESS; \
4886 \
4887 case IEMMODE_64BIT: \
4888 IEM_MC_BEGIN(0, 2); \
4889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4890 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4892 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4893 a_Cnd { \
4894 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4895 } IEM_MC_ENDIF(); \
4896 IEM_MC_ADVANCE_RIP(); \
4897 IEM_MC_END(); \
4898 return VINF_SUCCESS; \
4899 \
4900 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4901 } \
4902 } do {} while (0)
4903
4904
4905
4906/** Opcode 0x0f 0x40. */
4907FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
4908{
4909 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
4910 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
4911}
4912
4913
4914/** Opcode 0x0f 0x41. */
4915FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
4916{
4917 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
4918 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
4919}
4920
4921
4922/** Opcode 0x0f 0x42. */
4923FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
4924{
4925 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
4926 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
4927}
4928
4929
4930/** Opcode 0x0f 0x43. */
4931FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
4932{
4933 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
4934 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
4935}
4936
4937
4938/** Opcode 0x0f 0x44. */
4939FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
4940{
4941 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
4942 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
4943}
4944
4945
4946/** Opcode 0x0f 0x45. */
4947FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
4948{
4949 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
4950 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
4951}
4952
4953
4954/** Opcode 0x0f 0x46. */
4955FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
4956{
4957 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
4958 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
4959}
4960
4961
4962/** Opcode 0x0f 0x47. */
4963FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
4964{
4965 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
4966 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
4967}
4968
4969
4970/** Opcode 0x0f 0x48. */
4971FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
4972{
4973 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
4974 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
4975}
4976
4977
4978/** Opcode 0x0f 0x49. */
4979FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
4980{
4981 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
4982 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
4983}
4984
4985
4986/** Opcode 0x0f 0x4a. */
4987FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
4988{
4989 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
4990 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
4991}
4992
4993
4994/** Opcode 0x0f 0x4b. */
4995FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
4996{
4997 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
4998 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
4999}
5000
5001
5002/** Opcode 0x0f 0x4c. */
5003FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5004{
5005 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5006 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5007}
5008
5009
5010/** Opcode 0x0f 0x4d. */
5011FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5012{
5013 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5014 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5015}
5016
5017
5018/** Opcode 0x0f 0x4e. */
5019FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5020{
5021 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5022 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5023}
5024
5025
5026/** Opcode 0x0f 0x4f. */
5027FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5028{
5029 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5030 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5031}
5032
5033#undef CMOV_X
5034
5035/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5036FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5037{
5038 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5040 if (IEM_IS_MODRM_REG_MODE(bRm))
5041 {
5042 /*
5043 * Register, register.
5044 */
5045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5046 IEM_MC_BEGIN(2, 1);
5047 IEM_MC_LOCAL(uint8_t, u8Dst);
5048 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5049 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5050 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5051 IEM_MC_PREPARE_SSE_USAGE();
5052 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
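    /* The worker returns the four packed sign bits in the low nibble of
       u8Dst; the 32-bit store below zero-extends them into the full
       general register. */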
5053 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5054 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5055 IEM_MC_ADVANCE_RIP();
5056 IEM_MC_END();
5057 return VINF_SUCCESS;
5058 }
5059
5060 /* No memory operand. */
5061 return IEMOP_RAISE_INVALID_OPCODE();
5062}
5063
5064
5065/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5066FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5067{
5068 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5070 if (IEM_IS_MODRM_REG_MODE(bRm))
5071 {
5072 /*
5073 * Register, register.
5074 */
5075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5076 IEM_MC_BEGIN(2, 1);
5077 IEM_MC_LOCAL(uint8_t, u8Dst);
5078 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5079 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5080 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5081 IEM_MC_PREPARE_SSE_USAGE();
5082 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5083 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5084 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5085 IEM_MC_ADVANCE_RIP();
5086 IEM_MC_END();
5087 return VINF_SUCCESS;
5088 }
5089
5090 /* No memory operand. */
5091 return IEMOP_RAISE_INVALID_OPCODE();
5092
5093}
5094
5095
5096/* Opcode 0xf3 0x0f 0x50 - invalid */
5097/* Opcode 0xf2 0x0f 0x50 - invalid */
5098
5099
5100/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5101FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5102{
5103 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5104 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5105}
5106
5107
5108/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5109FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5110{
5111 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5112 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5113}
5114
5115
5116/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5117FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5118{
5119 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5120 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5121}
5122
5123
5124/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5125FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5126{
5127 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5128 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5129}
5130
5131
5132/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5133FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
5134/* Opcode 0x66 0x0f 0x52 - invalid */
5135/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5136FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
5137/* Opcode 0xf2 0x0f 0x52 - invalid */
5138
5139/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5140FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5141/* Opcode 0x66 0x0f 0x53 - invalid */
5142/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5143FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5144/* Opcode 0xf2 0x0f 0x53 - invalid */
5145
5146
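/* Note: the packed single/double logical instructions (andps/andpd, andnps/
   andnpd, orps/orpd, xorps/xorpd) reuse the integer pand/pandn/por/pxor
   workers, the bitwise operation being identical regardless of operand
   type. */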
5147/** Opcode 0x0f 0x54 - andps Vps, Wps */
5148FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5149{
5150 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5151 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5152}
5153
5154
5155/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5156FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5157{
5158 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5159 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5160}
5161
5162
5163/* Opcode 0xf3 0x0f 0x54 - invalid */
5164/* Opcode 0xf2 0x0f 0x54 - invalid */
5165
5166
5167/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5168FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5169{
5170 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5171 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5172}
5173
5174
5175/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5176FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5177{
5178 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5179 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5180}
5181
5182
5183/* Opcode 0xf3 0x0f 0x55 - invalid */
5184/* Opcode 0xf2 0x0f 0x55 - invalid */
5185
5186
5187/** Opcode 0x0f 0x56 - orps Vps, Wps */
5188FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5189{
5190 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5191 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5192}
5193
5194
5195/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5196FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5197{
5198 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5199 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5200}
5201
5202
5203/* Opcode 0xf3 0x0f 0x56 - invalid */
5204/* Opcode 0xf2 0x0f 0x56 - invalid */
5205
5206
5207/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5208FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5209{
5210 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5211 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5212}
5213
5214
5215/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5216FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5217{
5218 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5219 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5220}
5221
5222
5223/* Opcode 0xf3 0x0f 0x57 - invalid */
5224/* Opcode 0xf2 0x0f 0x57 - invalid */
5225
5226/** Opcode 0x0f 0x58 - addps Vps, Wps */
5227FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5228{
5229 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5230 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5231}
5232
5233
5234/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5235FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5236{
5237 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5238 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5239}
5240
5241
5242/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5243FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5244{
5245 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5246 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5247}
5248
5249
5250/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5251FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5252{
5253 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5254 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5255}
5256
5257
5258/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5259FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5260{
5261 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5262 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5263}
5264
5265
5266/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5267FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5268{
5269 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5270 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5271}
5272
5273
5274/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5275FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5276{
5277 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5278 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5279}
5280
5281
5282/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5283FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5284{
5285 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5286 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5287}
5288
5289
5290/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5291FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5292{
5293 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5294 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5295}
5296
5297
5298/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5299FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5300{
5301 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5302 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5303}
5304
5305
5306/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5307FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5308{
5309 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5310 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5311}
5312
5313
5314/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5315FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5316{
5317 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5318 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5319}
5320
5321
5322/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5323FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
5324/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5325FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
5326/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5327FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
5328/* Opcode 0xf2 0x0f 0x5b - invalid */
5329
5330
5331/** Opcode 0x0f 0x5c - subps Vps, Wps */
5332FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5333{
5334 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5335 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5336}
5337
5338
5339/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5340FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5341{
5342 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5343 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5344}
5345
5346
5347/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5348FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5349{
5350 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5351 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5352}
5353
5354
5355/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5356FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5357{
5358 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5359 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5360}
5361
5362
5363/** Opcode 0x0f 0x5d - minps Vps, Wps */
5364FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5365{
5366 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5367 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5368}
5369
5370
5371/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5372FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5373{
5374 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5375 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5376}
5377
5378
5379/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5380FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5381{
5382 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5383 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5384}
5385
5386
5387/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5388FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5389{
5390 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5391 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5392}
5393
5394
5395/** Opcode 0x0f 0x5e - divps Vps, Wps */
5396FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5397{
5398 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5399 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5400}
5401
5402
5403/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5404FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5405{
5406 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5407 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5408}
5409
5410
5411/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5412FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5413{
5414 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5415 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5416}
5417
5418
5419/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5420FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5421{
5422 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5423 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5424}
5425
5426
5427/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5428FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5429{
5430 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5431 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5432}
5433
5434
5435/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5436FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5437{
5438 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5439 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5440}
5441
5442
5443/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5444FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5445{
5446 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5447 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5448}
5449
5450
5451/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5452FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5453{
5454 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5455 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5456}
5457
5458
5459/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5460FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5461{
5462 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5463 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5464}
5465
5466
5467/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5468FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5469{
5470 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5471 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5472}
5473
5474
5475/* Opcode 0xf3 0x0f 0x60 - invalid */
5476
5477
5478/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5479FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5480{
5481 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
5482 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5483 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5484}
5485
5486
5487/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5488FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5489{
5490 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5491 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5492}
5493
5494
5495/* Opcode 0xf3 0x0f 0x61 - invalid */
5496
5497
5498/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5499FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5500{
5501 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5502 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5503}
5504
5505
5506/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5507FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5508{
5509 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5510 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5511}
5512
5513
5514/* Opcode 0xf3 0x0f 0x62 - invalid */
5515
5516
5517
5518/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5519FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5520{
5521 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5522 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5523}
5524
5525
5526/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5527FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5528{
5529 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5530 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5531}
5532
5533
5534/* Opcode 0xf3 0x0f 0x63 - invalid */
5535
5536
5537/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5538FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5539{
5540 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5541 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5542}
5543
5544
5545/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5546FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5547{
5548 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5549 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5550}
5551
5552
5553/* Opcode 0xf3 0x0f 0x64 - invalid */
5554
5555
5556/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5557FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5558{
5559 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5560 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5561}
5562
5563
5564/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5565FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5566{
5567 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5568 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5569}
5570
5571
5572/* Opcode 0xf3 0x0f 0x65 - invalid */
5573
5574
5575/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5576FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5577{
5578 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5579 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5580}
5581
5582
5583/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5584FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5585{
5586 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5587 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5588}
5589
5590
5591/* Opcode 0xf3 0x0f 0x66 - invalid */
5592
5593
5594/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5595FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5596{
5597 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5598 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5599}
5600
5601
5602/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5603FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5604{
5605 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5606 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
5607}
5608
5609
5610/* Opcode 0xf3 0x0f 0x67 - invalid */
5611
5612
5613/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5614 * @note Intel and AMD both use Qd for the second parameter; however, they
5615 * both list it as an mmX/mem64 operand and Intel describes it as being
5616 * loaded as a qword, so it should be Qq, shouldn't it? */
5617FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5618{
5619 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5620 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5621}
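
/*
 * Illustrative sketch (not compiled): the high-half byte interleave the
 * punpckhbw worker above is expected to perform, assuming the usual Intel
 * semantics.  The helper name is hypothetical and not part of IEM.
 */
#if 0
static uint64_t sketchPunpckhbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uint64_t const bFromDst = (uDst >> (32 + i * 8)) & 0xff; /* high-half byte of the destination */
        uint64_t const bFromSrc = (uSrc >> (32 + i * 8)) & 0xff; /* high-half byte of the source */
        uResult |= bFromDst << (i * 16);     /* even result bytes come from the destination */
        uResult |= bFromSrc << (i * 16 + 8); /* odd result bytes come from the source */
    }
    return uResult;
}
#endif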
5622
5623
5624/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5625FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5626{
5627 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5628 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5629}
5630
5631
5632/* Opcode 0xf3 0x0f 0x68 - invalid */
5633
5634
5635/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5636 * @note Intel and AMD both use Qd for the second parameter; however, they
5637 * both list it as an mmX/mem64 operand and Intel describes it as being
5638 * loaded as a qword, so it should be Qq, shouldn't it? */
5639FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5640{
5641 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5642 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5643}
5644
5645
5646/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5647FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5648{
5649 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5650 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5652}
5653
5654
5655/* Opcode 0xf3 0x0f 0x69 - invalid */
5656
5657
5658/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5659 * @note Intel and AMD both use Qd for the second parameter; however, they
5660 * both list it as an mmX/mem64 operand and Intel describes it as being
5661 * loaded as a qword, so it should be Qq, shouldn't it? */
5662FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5663{
5664 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5665 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5666}
5667
5668
5669/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5670FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5671{
5672 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5673 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5674}
5675
5676
5677/* Opcode 0xf3 0x0f 0x6a - invalid */
5678
5679
5680/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5681FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5682{
5683 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5684 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5685}
5686
5687
5688/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5689FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5690{
5691 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5692 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5693}
5694
5695
5696/* Opcode 0xf3 0x0f 0x6b - invalid */
5697
5698
5699/* Opcode 0x0f 0x6c - invalid */
5700
5701
5702/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5703FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5704{
5705 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5706 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5707}
5708
5709
5710/* Opcode 0xf3 0x0f 0x6c - invalid */
5711/* Opcode 0xf2 0x0f 0x6c - invalid */
5712
5713
5714/* Opcode 0x0f 0x6d - invalid */
5715
5716
5717/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5718FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5719{
5720 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5721 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5722}
5723
5724
5725/* Opcode 0xf3 0x0f 0x6d - invalid */
5726
5727
5728FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
5729{
5730 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5731 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5732 {
5733 /**
5734 * @opcode 0x6e
5735 * @opcodesub rex.w=1
5736 * @oppfx none
5737 * @opcpuid mmx
5738 * @opgroup og_mmx_datamove
5739 * @opxcpttype 5
5740 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5741 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5742 */
5743 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5744 if (IEM_IS_MODRM_REG_MODE(bRm))
5745 {
5746 /* MMX, greg64 */
5747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5748 IEM_MC_BEGIN(0, 1);
5749 IEM_MC_LOCAL(uint64_t, u64Tmp);
5750
5751 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5752 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5753
5754 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5755 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5756 IEM_MC_FPU_TO_MMX_MODE();
5757
5758 IEM_MC_ADVANCE_RIP();
5759 IEM_MC_END();
5760 }
5761 else
5762 {
5763 /* MMX, [mem64] */
5764 IEM_MC_BEGIN(0, 2);
5765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5766 IEM_MC_LOCAL(uint64_t, u64Tmp);
5767
5768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5770 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5771 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5772
5773 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5774 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5775 IEM_MC_FPU_TO_MMX_MODE();
5776
5777 IEM_MC_ADVANCE_RIP();
5778 IEM_MC_END();
5779 }
5780 }
5781 else
5782 {
5783 /**
5784 * @opdone
5785 * @opcode 0x6e
5786 * @opcodesub rex.w=0
5787 * @oppfx none
5788 * @opcpuid mmx
5789 * @opgroup og_mmx_datamove
5790 * @opxcpttype 5
5791 * @opfunction iemOp_movd_q_Pd_Ey
5792 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5793 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5794 */
5795 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5796 if (IEM_IS_MODRM_REG_MODE(bRm))
5797 {
5798 /* MMX, greg */
5799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5800 IEM_MC_BEGIN(0, 1);
5801 IEM_MC_LOCAL(uint64_t, u64Tmp);
5802
5803 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5804 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5805
5806 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5807 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5808 IEM_MC_FPU_TO_MMX_MODE();
5809
5810 IEM_MC_ADVANCE_RIP();
5811 IEM_MC_END();
5812 }
5813 else
5814 {
5815 /* MMX, [mem] */
5816 IEM_MC_BEGIN(0, 2);
5817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5818 IEM_MC_LOCAL(uint32_t, u32Tmp);
5819
5820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5822 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5823 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5824
5825 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5826 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
5827 IEM_MC_FPU_TO_MMX_MODE();
5828
5829 IEM_MC_ADVANCE_RIP();
5830 IEM_MC_END();
5831 }
5832 }
5833 return VINF_SUCCESS;
5834}
5835
5836FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
5837{
5838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5839 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5840 {
5841 /**
5842 * @opcode 0x6e
5843 * @opcodesub rex.w=1
5844 * @oppfx 0x66
5845 * @opcpuid sse2
5846 * @opgroup og_sse2_simdint_datamove
5847 * @opxcpttype 5
5848 * @optest 64-bit / op1=1 op2=2 -> op1=2
5849 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5850 */
5851 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5852 if (IEM_IS_MODRM_REG_MODE(bRm))
5853 {
5854 /* XMM, greg64 */
5855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5856 IEM_MC_BEGIN(0, 1);
5857 IEM_MC_LOCAL(uint64_t, u64Tmp);
5858
5859 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5860 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5861
5862 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5863 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
5864
5865 IEM_MC_ADVANCE_RIP();
5866 IEM_MC_END();
5867 }
5868 else
5869 {
5870 /* XMM, [mem64] */
5871 IEM_MC_BEGIN(0, 2);
5872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5873 IEM_MC_LOCAL(uint64_t, u64Tmp);
5874
5875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5877 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5878 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5879
5880 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5881 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
5882
5883 IEM_MC_ADVANCE_RIP();
5884 IEM_MC_END();
5885 }
5886 }
5887 else
5888 {
5889 /**
5890 * @opdone
5891 * @opcode 0x6e
5892 * @opcodesub rex.w=0
5893 * @oppfx 0x66
5894 * @opcpuid sse2
5895 * @opgroup og_sse2_simdint_datamove
5896 * @opxcpttype 5
5897 * @opfunction iemOp_movd_q_Vy_Ey
5898 * @optest op1=1 op2=2 -> op1=2
5899 * @optest op1=0 op2=-42 -> op1=-42
5900 */
5901 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5902 if (IEM_IS_MODRM_REG_MODE(bRm))
5903 {
5904 /* XMM, greg32 */
5905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5906 IEM_MC_BEGIN(0, 1);
5907 IEM_MC_LOCAL(uint32_t, u32Tmp);
5908
5909 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5910 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5911
5912 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
5913 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
5914
5915 IEM_MC_ADVANCE_RIP();
5916 IEM_MC_END();
5917 }
5918 else
5919 {
5920 /* XMM, [mem32] */
5921 IEM_MC_BEGIN(0, 2);
5922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5923 IEM_MC_LOCAL(uint32_t, u32Tmp);
5924
5925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5927 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5928 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5929
5930 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5931 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
5932
5933 IEM_MC_ADVANCE_RIP();
5934 IEM_MC_END();
5935 }
5936 }
5937 return VINF_SUCCESS;
5938}
5939
5940/* Opcode 0xf3 0x0f 0x6e - invalid */
5941
5942
5943/**
5944 * @opcode 0x6f
5945 * @oppfx none
5946 * @opcpuid mmx
5947 * @opgroup og_mmx_datamove
5948 * @opxcpttype 5
5949 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5950 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5951 */
5952FNIEMOP_DEF(iemOp_movq_Pq_Qq)
5953{
5954 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5955 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5956 if (IEM_IS_MODRM_REG_MODE(bRm))
5957 {
5958 /*
5959 * Register, register.
5960 */
5961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5962 IEM_MC_BEGIN(0, 1);
5963 IEM_MC_LOCAL(uint64_t, u64Tmp);
5964
5965 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5966 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5967
5968 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
5969 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5970 IEM_MC_FPU_TO_MMX_MODE();
5971
5972 IEM_MC_ADVANCE_RIP();
5973 IEM_MC_END();
5974 }
5975 else
5976 {
5977 /*
5978 * Register, memory.
5979 */
5980 IEM_MC_BEGIN(0, 2);
5981 IEM_MC_LOCAL(uint64_t, u64Tmp);
5982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5983
5984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5986 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5987 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5988
5989 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5990 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
5991 IEM_MC_FPU_TO_MMX_MODE();
5992
5993 IEM_MC_ADVANCE_RIP();
5994 IEM_MC_END();
5995 }
5996 return VINF_SUCCESS;
5997}
5998
5999/**
6000 * @opcode 0x6f
6001 * @oppfx 0x66
6002 * @opcpuid sse2
6003 * @opgroup og_sse2_simdint_datamove
6004 * @opxcpttype 1
6005 * @optest op1=1 op2=2 -> op1=2
6006 * @optest op1=0 op2=-42 -> op1=-42
6007 */
6008FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6009{
6010 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6012 if (IEM_IS_MODRM_REG_MODE(bRm))
6013 {
6014 /*
6015 * Register, register.
6016 */
6017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6018 IEM_MC_BEGIN(0, 0);
6019
6020 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6021 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6022
6023 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6024 IEM_GET_MODRM_RM(pVCpu, bRm));
6025 IEM_MC_ADVANCE_RIP();
6026 IEM_MC_END();
6027 }
6028 else
6029 {
6030 /*
6031 * Register, memory.
6032 */
6033 IEM_MC_BEGIN(0, 2);
6034 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6036
6037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6039 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6040 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6041
6042 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6043 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6044
6045 IEM_MC_ADVANCE_RIP();
6046 IEM_MC_END();
6047 }
6048 return VINF_SUCCESS;
6049}
6050
6051/**
6052 * @opcode 0x6f
6053 * @oppfx 0xf3
6054 * @opcpuid sse2
6055 * @opgroup og_sse2_simdint_datamove
6056 * @opxcpttype 4UA
6057 * @optest op1=1 op2=2 -> op1=2
6058 * @optest op1=0 op2=-42 -> op1=-42
6059 */
6060FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6061{
6062 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6064 if (IEM_IS_MODRM_REG_MODE(bRm))
6065 {
6066 /*
6067 * Register, register.
6068 */
6069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6070 IEM_MC_BEGIN(0, 0);
6071 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6072 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6073 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6074 IEM_GET_MODRM_RM(pVCpu, bRm));
6075 IEM_MC_ADVANCE_RIP();
6076 IEM_MC_END();
6077 }
6078 else
6079 {
6080 /*
6081 * Register, memory.
6082 */
6083 IEM_MC_BEGIN(0, 2);
6084 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6086
6087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6089 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6090 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6091 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6092 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6093
6094 IEM_MC_ADVANCE_RIP();
6095 IEM_MC_END();
6096 }
6097 return VINF_SUCCESS;
6098}
6099
6100
6101/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6102FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6103{
6104 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6106 if (IEM_IS_MODRM_REG_MODE(bRm))
6107 {
6108 /*
6109 * Register, register.
6110 */
6111 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6113
6114 IEM_MC_BEGIN(3, 0);
6115 IEM_MC_ARG(uint64_t *, pDst, 0);
6116 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6117 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6118 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6119 IEM_MC_PREPARE_FPU_USAGE();
6120 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6121 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6122 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6123 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6124 IEM_MC_FPU_TO_MMX_MODE();
6125 IEM_MC_ADVANCE_RIP();
6126 IEM_MC_END();
6127 }
6128 else
6129 {
6130 /*
6131 * Register, memory.
6132 */
6133 IEM_MC_BEGIN(3, 2);
6134 IEM_MC_ARG(uint64_t *, pDst, 0);
6135 IEM_MC_LOCAL(uint64_t, uSrc);
6136 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6138
6139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6140 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6141 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6143 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6144
6145 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6146 IEM_MC_PREPARE_FPU_USAGE();
6147 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6148 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6149 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6150 IEM_MC_FPU_TO_MMX_MODE();
6151
6152 IEM_MC_ADVANCE_RIP();
6153 IEM_MC_END();
6154 }
6155 return VINF_SUCCESS;
6156}
6157
6158
6159/**
6160 * Common worker for SSE2 instructions on the forms:
6161 * pshufd xmm1, xmm2/mem128, imm8
6162 * pshufhw xmm1, xmm2/mem128, imm8
6163 * pshuflw xmm1, xmm2/mem128, imm8
6164 *
6165 * Proper alignment of the 128-bit operand is enforced.
6166 * Exceptions type 4. SSE2 cpuid checks.
6167 */
6168FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6169{
6170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6171 if (IEM_IS_MODRM_REG_MODE(bRm))
6172 {
6173 /*
6174 * Register, register.
6175 */
6176 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6178
6179 IEM_MC_BEGIN(3, 0);
6180 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6181 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6182 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6183 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6184 IEM_MC_PREPARE_SSE_USAGE();
6185 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6186 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6187 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6188 IEM_MC_ADVANCE_RIP();
6189 IEM_MC_END();
6190 }
6191 else
6192 {
6193 /*
6194 * Register, memory.
6195 */
6196 IEM_MC_BEGIN(3, 2);
6197 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6198 IEM_MC_LOCAL(RTUINT128U, uSrc);
6199 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6201
6202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6203 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6204 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6206 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6207
6208 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6209 IEM_MC_PREPARE_SSE_USAGE();
6210 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6211 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6212
6213 IEM_MC_ADVANCE_RIP();
6214 IEM_MC_END();
6215 }
6216 return VINF_SUCCESS;
6217}
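
/*
 * Illustrative sketch (not compiled): for pshufd the worker called above
 * picks each destination dword with a 2-bit field of the immediate;
 * pshufhw/pshuflw apply the same selection to one half of the words only.
 * The helper name is hypothetical and not part of IEM.
 */
#if 0
static void sketchPshufdU128(uint32_t auDst[4], uint32_t const auSrc[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = auSrc[(bImm >> (i * 2)) & 3]; /* bits 2i+1:2i select the source dword */
}
#endif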
6218
6219
6220/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6221FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6222{
6223 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6224 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6225}
6226
6227
6228/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6229FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6230{
6231 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6232 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6233}
6234
6235
6236/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6237FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6238{
6239 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6240 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6241}
6242
6243
6244/**
6245 * Common worker for MMX instructions on the forms:
6246 * psrlw mm, imm8
6247 * psraw mm, imm8
6248 * psllw mm, imm8
6249 * psrld mm, imm8
6250 * psrad mm, imm8
6251 * pslld mm, imm8
6252 * psrlq mm, imm8
6253 * psllq mm, imm8
6254 *
6255 */
6256FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6257{
6258 if (IEM_IS_MODRM_REG_MODE(bRm))
6259 {
6260 /*
6261 * Register, immediate.
6262 */
6263 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6265
6266 IEM_MC_BEGIN(2, 0);
6267 IEM_MC_ARG(uint64_t *, pDst, 0);
6268 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6269 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6270 IEM_MC_PREPARE_FPU_USAGE();
6271 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6272 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6273 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6274 IEM_MC_FPU_TO_MMX_MODE();
6275 IEM_MC_ADVANCE_RIP();
6276 IEM_MC_END();
6277 }
6278 else
6279 {
6280 /*
6281 * Register, memory not supported.
6282 */
6283 /// @todo Caller already enforced register mode?!
6284 }
6285 return VINF_SUCCESS;
6286}
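
/*
 * Illustrative sketch (not compiled): the element-wise behaviour the worker
 * above dispatches to, shown for psrlw, assuming the architectural rule that
 * a logical shift count above the element width clears every element
 * (arithmetic shifts saturate the count instead).  The helper name is
 * hypothetical and not part of IEM.
 */
#if 0
static uint64_t sketchPsrlwImmU64(uint64_t uSrc, uint8_t bShift)
{
    if (bShift > 15)
        return 0; /* counts beyond 15 zero all four words */
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uint16_t const uWord = (uint16_t)(uSrc >> (i * 16));
        uResult |= (uint64_t)(uint16_t)(uWord >> bShift) << (i * 16);
    }
    return uResult;
}
#endif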
6287
6288
6289/**
6290 * Common worker for SSE2 instructions on the forms:
6291 * psrlw xmm, imm8
6292 * psraw xmm, imm8
6293 * psllw xmm, imm8
6294 * psrld xmm, imm8
6295 * psrad xmm, imm8
6296 * pslld xmm, imm8
6297 * psrlq xmm, imm8
6298 * psllq xmm, imm8
6299 *
6300 */
6301FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6302{
6303 if (IEM_IS_MODRM_REG_MODE(bRm))
6304 {
6305 /*
6306 * Register, immediate.
6307 */
6308 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6310
6311 IEM_MC_BEGIN(2, 0);
6312 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6313 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6314 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6315 IEM_MC_PREPARE_SSE_USAGE();
6316 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6317 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6318 IEM_MC_ADVANCE_RIP();
6319 IEM_MC_END();
6320 }
6321 else
6322 {
6323 /*
6324 * Register, memory not supported.
6325 */
6326 /// @todo Caller already enforced register mode?!
6327 }
6328 return VINF_SUCCESS;
6329}
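
/*
 * Illustrative sketch (not compiled): the byte-granular psrldq form handled
 * via this worker shifts the whole xmm value right by bImm bytes, filling
 * with zeros; counts above 15 clear the register.  The helper name is
 * hypothetical and not part of IEM.
 */
#if 0
static void sketchPsrldqImmU128(uint8_t abReg[16], uint8_t bImm)
{
    for (unsigned i = 0; i < 16; i++)
        abReg[i] = (unsigned)(i + bImm) < 16 ? abReg[i + bImm] : 0; /* little-endian byte order */
}
#endif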
6330
6331
6332/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6333FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6334{
6335// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6336 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6337}
6338
6339
6340/** Opcode 0x66 0x0f 0x71 11/2. */
6341FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6342{
6343// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6344 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6345}
6346
6347
6348/** Opcode 0x0f 0x71 11/4. */
6349FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6350{
6351// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6352 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6353}
6354
6355
6356/** Opcode 0x66 0x0f 0x71 11/4. */
6357FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6358{
6359// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6360 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6361}
6362
6363
6364/** Opcode 0x0f 0x71 11/6. */
6365FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6366{
6367// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6368 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6369}
6370
6371
6372/** Opcode 0x66 0x0f 0x71 11/6. */
6373FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6374{
6375// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6376 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6377}
6378
6379
6380/**
6381 * Group 12 jump table for register variant.
6382 */
6383IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6384{
6385 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6386 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6387 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6388 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6389 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6390 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6391 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6392 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6393};
6394AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6395
6396
6397/** Opcode 0x0f 0x71. */
6398FNIEMOP_DEF(iemOp_Grp12)
6399{
6400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6401 if (IEM_IS_MODRM_REG_MODE(bRm))
6402 /* register, register */
6403 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6404 + pVCpu->iem.s.idxPrefix], bRm);
6405 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6406}
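
/*
 * Illustrative sketch (not compiled): how the group 12/13/14 dispatchers
 * index their 8x4 tables - the ModR/M reg field picks the row and the
 * mandatory prefix picks the column (0 = none, 1 = 0x66, 2 = 0xf3,
 * 3 = 0xf2, which is assumed to match pVCpu->iem.s.idxPrefix).  The helper
 * name is hypothetical and not part of IEM.
 */
#if 0
static unsigned sketchGroupTableIndex(uint8_t bRm, unsigned idxPrefix)
{
    unsigned const iReg = (bRm >> 3) & 7; /* ModR/M reg field, i.e. /0 thru /7 */
    return iReg * 4 + idxPrefix;          /* row-major 8x4 layout */
}
#endif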
6407
6408
6409/** Opcode 0x0f 0x72 11/2. */
6410FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6411{
6412// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6413 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6414}
6415
6416
6417/** Opcode 0x66 0x0f 0x72 11/2. */
6418FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6419{
6420// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6421 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6422}
6423
6424
6425/** Opcode 0x0f 0x72 11/4. */
6426FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6427{
6428// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6429 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6430}
6431
6432
6433/** Opcode 0x66 0x0f 0x72 11/4. */
6434FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6435{
6436// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6437 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6438}
6439
6440
6441/** Opcode 0x0f 0x72 11/6. */
6442FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6443{
6444// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6445 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6446}
6447
6448/** Opcode 0x66 0x0f 0x72 11/6. */
6449FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6450{
6451// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6452 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6453}
6454
6455
6456/**
6457 * Group 13 jump table for register variant.
6458 */
6459IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6460{
6461 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6462 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6463 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6464 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6465 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6466 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6467 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6468 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6469};
6470AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6471
6472/** Opcode 0x0f 0x72. */
6473FNIEMOP_DEF(iemOp_Grp13)
6474{
6475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6476 if (IEM_IS_MODRM_REG_MODE(bRm))
6477 /* register, register */
6478 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6479 + pVCpu->iem.s.idxPrefix], bRm);
6480 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6481}
6482
6483
6484/** Opcode 0x0f 0x73 11/2. */
6485FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6486{
6487// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6488 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6489}
6490
6491
6492/** Opcode 0x66 0x0f 0x73 11/2. */
6493FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6494{
6495// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6496 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6497}
6498
6499
6500/** Opcode 0x66 0x0f 0x73 11/3. */
6501FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6502{
6503// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6504 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6505}
6506
6507
6508/** Opcode 0x0f 0x73 11/6. */
6509FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6510{
6511// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6512 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6513}
6514
6515
6516/** Opcode 0x66 0x0f 0x73 11/6. */
6517FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6518{
6519// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6520 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6521}
6522
6523
6524/** Opcode 0x66 0x0f 0x73 11/7. */
6525FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6526{
6527// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6528 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6529}
6530
6531/**
6532 * Group 14 jump table for register variant.
6533 */
6534IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6535{
6536 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6537 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6538 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6539 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6540 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6541 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6542 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6543 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6544};
6545AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6546
6547
6548/** Opcode 0x0f 0x73. */
6549FNIEMOP_DEF(iemOp_Grp14)
6550{
6551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6552 if (IEM_IS_MODRM_REG_MODE(bRm))
6553 /* register, register */
6554 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6555 + pVCpu->iem.s.idxPrefix], bRm);
6556 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6557}
6558
6559
6560/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6561FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6562{
6563 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6564 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6565}
6566
6567
6568/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6569FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6570{
6571 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6572 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
6573}
6574
6575
6576/* Opcode 0xf3 0x0f 0x74 - invalid */
6577/* Opcode 0xf2 0x0f 0x74 - invalid */
6578
6579
6580/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6581FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6582{
6583 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6584 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6585}
6586
6587
6588/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6589FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6590{
6591 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6592 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
6593}
6594
6595
6596/* Opcode 0xf3 0x0f 0x75 - invalid */
6597/* Opcode 0xf2 0x0f 0x75 - invalid */
6598
6599
6600/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6601FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6602{
6603 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6604 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6605}
6606
6607
6608/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6609FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6610{
6611 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6612 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
6613}
6614
6615
6616/* Opcode 0xf3 0x0f 0x76 - invalid */
6617/* Opcode 0xf2 0x0f 0x76 - invalid */
6618
6619
6620/** Opcode 0x0f 0x77 - emms (VEX has vzeroall and vzeroupper here) */
6621FNIEMOP_DEF(iemOp_emms)
6622{
6623 IEMOP_MNEMONIC(emms, "emms");
6624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6625
6626 IEM_MC_BEGIN(0, 0);
6627 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6628 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6629 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6630 IEM_MC_FPU_FROM_MMX_MODE();
6631 IEM_MC_ADVANCE_RIP();
6632 IEM_MC_END();
6633 return VINF_SUCCESS;
6634}
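
/*
 * Illustrative sketch (not compiled): the architectural effect of emms is to
 * mark all eight x87 registers as empty so regular FPU code can run again;
 * with a full-format tag word that amounts to storing 0xffff.  The helper
 * name is hypothetical and not part of IEM.
 */
#if 0
static void sketchEmms(uint16_t *pu16Ftw)
{
    *pu16Ftw = 0xffff; /* tag ST(0)..ST(7) as empty (11b each), leaving MMX mode */
}
#endif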
6635
6636/* Opcode 0x66 0x0f 0x77 - invalid */
6637/* Opcode 0xf3 0x0f 0x77 - invalid */
6638/* Opcode 0xf2 0x0f 0x77 - invalid */
6639
6640/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6641#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6642FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6643{
6644 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6645 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6646 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6647 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
6648
6649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6650 if (IEM_IS_MODRM_REG_MODE(bRm))
6651 {
6652 /*
6653 * Register, register.
6654 */
6655 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6656 if (enmEffOpSize == IEMMODE_64BIT)
6657 {
6658 IEM_MC_BEGIN(2, 0);
6659 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6660 IEM_MC_ARG(uint64_t, u64Enc, 1);
6661 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6662 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6663 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6664 IEM_MC_END();
6665 }
6666 else
6667 {
6668 IEM_MC_BEGIN(2, 0);
6669 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6670 IEM_MC_ARG(uint32_t, u32Enc, 1);
6671 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6672 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6673 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
6674 IEM_MC_END();
6675 }
6676 }
6677 else
6678 {
6679 /*
6680 * Memory, register.
6681 */
6682 if (enmEffOpSize == IEMMODE_64BIT)
6683 {
6684 IEM_MC_BEGIN(3, 0);
6685 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6686 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6687 IEM_MC_ARG(uint64_t, u64Enc, 2);
6688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6689 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6690 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6691 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6692 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6693 IEM_MC_END();
6694 }
6695 else
6696 {
6697 IEM_MC_BEGIN(3, 0);
6698 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6699 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6700 IEM_MC_ARG(uint32_t, u32Enc, 2);
6701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6702 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6703 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6704 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6705 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
6706 IEM_MC_END();
6707 }
6708 }
6709 return VINF_SUCCESS;
6710}
6711#else
6712FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
6713#endif
6714
6715/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
6716FNIEMOP_STUB(iemOp_AmdGrp17);
6717/* Opcode 0xf3 0x0f 0x78 - invalid */
6718/* Opcode 0xf2 0x0f 0x78 - invalid */
6719
6720/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
6721#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6722FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
6723{
6724 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
6725 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
6726 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
6727 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
6728
6729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6730 if (IEM_IS_MODRM_REG_MODE(bRm))
6731 {
6732 /*
6733 * Register, register.
6734 */
6735 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6736 if (enmEffOpSize == IEMMODE_64BIT)
6737 {
6738 IEM_MC_BEGIN(2, 0);
6739 IEM_MC_ARG(uint64_t, u64Val, 0);
6740 IEM_MC_ARG(uint64_t, u64Enc, 1);
6741 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6742 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6743 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
6744 IEM_MC_END();
6745 }
6746 else
6747 {
6748 IEM_MC_BEGIN(2, 0);
6749 IEM_MC_ARG(uint32_t, u32Val, 0);
6750 IEM_MC_ARG(uint32_t, u32Enc, 1);
6751 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
6752 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6753 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
6754 IEM_MC_END();
6755 }
6756 }
6757 else
6758 {
6759 /*
6760 * Register, memory.
6761 */
6762 if (enmEffOpSize == IEMMODE_64BIT)
6763 {
6764 IEM_MC_BEGIN(3, 0);
6765 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6766 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6767 IEM_MC_ARG(uint64_t, u64Enc, 2);
6768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6769 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6770 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6771 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6772 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
6773 IEM_MC_END();
6774 }
6775 else
6776 {
6777 IEM_MC_BEGIN(3, 0);
6778 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6779 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6780 IEM_MC_ARG(uint32_t, u32Enc, 2);
6781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6782 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6783 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6784 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6785 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
6786 IEM_MC_END();
6787 }
6788 }
6789 return VINF_SUCCESS;
6790}
6791#else
6792FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
6793#endif
6794/* Opcode 0x66 0x0f 0x79 - invalid */
6795/* Opcode 0xf3 0x0f 0x79 - invalid */
6796/* Opcode 0xf2 0x0f 0x79 - invalid */
6797
6798/* Opcode 0x0f 0x7a - invalid */
6799/* Opcode 0x66 0x0f 0x7a - invalid */
6800/* Opcode 0xf3 0x0f 0x7a - invalid */
6801/* Opcode 0xf2 0x0f 0x7a - invalid */
6802
6803/* Opcode 0x0f 0x7b - invalid */
6804/* Opcode 0x66 0x0f 0x7b - invalid */
6805/* Opcode 0xf3 0x0f 0x7b - invalid */
6806/* Opcode 0xf2 0x0f 0x7b - invalid */
6807
6808/* Opcode 0x0f 0x7c - invalid */
6809
6810
6811/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
6812FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
6813{
6814 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6815 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
6816}
6817
6818
6819/* Opcode 0xf3 0x0f 0x7c - invalid */
6820
6821
6822/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
6823FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
6824{
6825 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6826 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
6827}
6828
6829
6830/* Opcode 0x0f 0x7d - invalid */
6831
6832
6833/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
6834FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
6835{
6836 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6837 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
6838}
6839
6840
6841/* Opcode 0xf3 0x0f 0x7d - invalid */
6842
6843
6844/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
6845FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
6846{
6847 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6848 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
6849}
6850
6851
6852/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
6853FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
6854{
6855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6856 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6857 {
6858 /**
6859 * @opcode 0x7e
6860 * @opcodesub rex.w=1
6861 * @oppfx none
6862 * @opcpuid mmx
6863 * @opgroup og_mmx_datamove
6864 * @opxcpttype 5
6865 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6866 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6867 */
6868 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6869 if (IEM_IS_MODRM_REG_MODE(bRm))
6870 {
6871 /* greg64, MMX */
6872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6873 IEM_MC_BEGIN(0, 1);
6874 IEM_MC_LOCAL(uint64_t, u64Tmp);
6875
6876 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6877 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6878
6879 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6880 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
6881 IEM_MC_FPU_TO_MMX_MODE();
6882
6883 IEM_MC_ADVANCE_RIP();
6884 IEM_MC_END();
6885 }
6886 else
6887 {
6888 /* [mem64], MMX */
6889 IEM_MC_BEGIN(0, 2);
6890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6891 IEM_MC_LOCAL(uint64_t, u64Tmp);
6892
6893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6895 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6896 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6897
6898 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6899 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
6900 IEM_MC_FPU_TO_MMX_MODE();
6901
6902 IEM_MC_ADVANCE_RIP();
6903 IEM_MC_END();
6904 }
6905 }
6906 else
6907 {
6908 /**
6909 * @opdone
6910 * @opcode 0x7e
6911 * @opcodesub rex.w=0
6912 * @oppfx none
6913 * @opcpuid mmx
6914 * @opgroup og_mmx_datamove
6915 * @opxcpttype 5
6916 * @opfunction iemOp_movd_q_Ey_Pd
6917 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6918 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6919 */
6920 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6921 if (IEM_IS_MODRM_REG_MODE(bRm))
6922 {
6923 /* greg32, MMX */
6924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6925 IEM_MC_BEGIN(0, 1);
6926 IEM_MC_LOCAL(uint32_t, u32Tmp);
6927
6928 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6929 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6930
6931 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
6932 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
6933 IEM_MC_FPU_TO_MMX_MODE();
6934
6935 IEM_MC_ADVANCE_RIP();
6936 IEM_MC_END();
6937 }
6938 else
6939 {
6940 /* [mem32], MMX */
6941 IEM_MC_BEGIN(0, 2);
6942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6943 IEM_MC_LOCAL(uint32_t, u32Tmp);
6944
6945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6947 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6948 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6949
6950 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
6951 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
6952 IEM_MC_FPU_TO_MMX_MODE();
6953
6954 IEM_MC_ADVANCE_RIP();
6955 IEM_MC_END();
6956 }
6957 }
6958 return VINF_SUCCESS;
6960}
6961
6962
6963FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
6964{
6965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6966 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6967 {
6968 /**
6969 * @opcode 0x7e
6970 * @opcodesub rex.w=1
6971 * @oppfx 0x66
6972 * @opcpuid sse2
6973 * @opgroup og_sse2_simdint_datamove
6974 * @opxcpttype 5
6975 * @optest 64-bit / op1=1 op2=2 -> op1=2
6976 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6977 */
6978 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6979 if (IEM_IS_MODRM_REG_MODE(bRm))
6980 {
6981 /* greg64, XMM */
6982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6983 IEM_MC_BEGIN(0, 1);
6984 IEM_MC_LOCAL(uint64_t, u64Tmp);
6985
6986 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6987 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6988
6989 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
6990 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
6991
6992 IEM_MC_ADVANCE_RIP();
6993 IEM_MC_END();
6994 }
6995 else
6996 {
6997 /* [mem64], XMM */
6998 IEM_MC_BEGIN(0, 2);
6999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7000 IEM_MC_LOCAL(uint64_t, u64Tmp);
7001
7002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7004 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7005 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7006
7007 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7008 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7009
7010 IEM_MC_ADVANCE_RIP();
7011 IEM_MC_END();
7012 }
7013 }
7014 else
7015 {
7016 /**
7017 * @opdone
7018 * @opcode 0x7e
7019 * @opcodesub rex.w=0
7020 * @oppfx 0x66
7021 * @opcpuid sse2
7022 * @opgroup og_sse2_simdint_datamove
7023 * @opxcpttype 5
7024 * @opfunction iemOp_movd_q_Ey_Vy
7025 * @optest op1=1 op2=2 -> op1=2
7026 * @optest op1=0 op2=-42 -> op1=-42
7027 */
7028 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7029 if (IEM_IS_MODRM_REG_MODE(bRm))
7030 {
7031 /* greg32, XMM */
7032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7033 IEM_MC_BEGIN(0, 1);
7034 IEM_MC_LOCAL(uint32_t, u32Tmp);
7035
7036 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7037 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7038
7039 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7040 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7041
7042 IEM_MC_ADVANCE_RIP();
7043 IEM_MC_END();
7044 }
7045 else
7046 {
7047 /* [mem32], XMM */
7048 IEM_MC_BEGIN(0, 2);
7049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7050 IEM_MC_LOCAL(uint32_t, u32Tmp);
7051
7052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7054 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7055 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7056
7057 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7058 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7059
7060 IEM_MC_ADVANCE_RIP();
7061 IEM_MC_END();
7062 }
7063 }
7064 return VINF_SUCCESS;
7066}
7067
7068/**
7069 * @opcode 0x7e
7070 * @oppfx 0xf3
7071 * @opcpuid sse2
7072 * @opgroup og_sse2_pcksclr_datamove
7073 * @opxcpttype none
7074 * @optest op1=1 op2=2 -> op1=2
7075 * @optest op1=0 op2=-42 -> op1=-42
7076 */
7077FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7078{
7079 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7081 if (IEM_IS_MODRM_REG_MODE(bRm))
7082 {
7083 /*
7084 * Register, register.
7085 */
7086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7087 IEM_MC_BEGIN(0, 2);
7088 IEM_MC_LOCAL(uint64_t, uSrc);
7089
7090 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7091 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7092
7093 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
7094 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7095
7096 IEM_MC_ADVANCE_RIP();
7097 IEM_MC_END();
7098 }
7099 else
7100 {
7101 /*
7102 * Memory, register.
7103 */
7104 IEM_MC_BEGIN(0, 2);
7105 IEM_MC_LOCAL(uint64_t, uSrc);
7106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7107
7108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7110 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7111 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7112
7113 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7114 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7115
7116 IEM_MC_ADVANCE_RIP();
7117 IEM_MC_END();
7118 }
7119 return VINF_SUCCESS;
7120}
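
/*
 * Illustrative sketch (not compiled): the load form above copies one qword
 * and zeroes the high half of the destination xmm register (the Zx in
 * VqZx_WO).  The helper name is hypothetical and not part of IEM.
 */
#if 0
static void sketchMovqVqWq(uint64_t auDst[2], uint64_t uSrc)
{
    auDst[0] = uSrc; /* low qword gets the source */
    auDst[1] = 0;    /* bits 127:64 are cleared */
}
#endif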
7121
7122/* Opcode 0xf2 0x0f 0x7e - invalid */
7123
7124
7125/** Opcode 0x0f 0x7f - movq Qq, Pq */
7126FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7127{
7128 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7130 if (IEM_IS_MODRM_REG_MODE(bRm))
7131 {
7132 /*
7133 * Register, register.
7134 */
7135 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7136 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7138 IEM_MC_BEGIN(0, 1);
7139 IEM_MC_LOCAL(uint64_t, u64Tmp);
7140 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7141 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7142 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7143 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7144 IEM_MC_FPU_TO_MMX_MODE();
7145 IEM_MC_ADVANCE_RIP();
7146 IEM_MC_END();
7147 }
7148 else
7149 {
7150 /*
7151 * Memory, register.
7152 */
7153 IEM_MC_BEGIN(0, 2);
7154 IEM_MC_LOCAL(uint64_t, u64Tmp);
7155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7156
7157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7159 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7160 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7161
7162 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7163 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7164 IEM_MC_FPU_TO_MMX_MODE();
7165
7166 IEM_MC_ADVANCE_RIP();
7167 IEM_MC_END();
7168 }
7169 return VINF_SUCCESS;
7170}
7171
7172/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7173FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7174{
7175 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7177 if (IEM_IS_MODRM_REG_MODE(bRm))
7178 {
7179 /*
7180 * Register, register.
7181 */
7182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7183 IEM_MC_BEGIN(0, 0);
7184 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7185 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7186 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7187 IEM_GET_MODRM_REG(pVCpu, bRm));
7188 IEM_MC_ADVANCE_RIP();
7189 IEM_MC_END();
7190 }
7191 else
7192 {
7193 /*
7194 * Register, memory.
7195 */
7196 IEM_MC_BEGIN(0, 2);
7197 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7199
7200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7202 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7203 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7204
7205 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
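 /* movdqa requires a 16-byte aligned operand: the _ALIGN_SSE store raises
    #GP(0) on a misaligned address, unlike the plain store movdqu uses below. */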
7206 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7207
7208 IEM_MC_ADVANCE_RIP();
7209 IEM_MC_END();
7210 }
7211 return VINF_SUCCESS;
7212}
7213
7214/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7215FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7216{
7217 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7219 if (IEM_IS_MODRM_REG_MODE(bRm))
7220 {
7221 /*
7222 * Register, register.
7223 */
7224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7225 IEM_MC_BEGIN(0, 0);
7226 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7227 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7228 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7229 IEM_GET_MODRM_REG(pVCpu, bRm));
7230 IEM_MC_ADVANCE_RIP();
7231 IEM_MC_END();
7232 }
7233 else
7234 {
7235 /*
7236 * Memory, register.
7237 */
7238 IEM_MC_BEGIN(0, 2);
7239 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7241
7242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7244 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7245 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7246
7247 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7248 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7249
7250 IEM_MC_ADVANCE_RIP();
7251 IEM_MC_END();
7252 }
7253 return VINF_SUCCESS;
7254}
7255
7256/* Opcode 0xf2 0x0f 0x7f - invalid */
7257
7258
7259
7260/** Opcode 0x0f 0x80. */
7261FNIEMOP_DEF(iemOp_jo_Jv)
7262{
7263 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7264 IEMOP_HLP_MIN_386();
7265 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
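 /* Jv: a 16-bit effective operand size selects a 16-bit displacement (with the
    result truncated to 16 bits); everything else takes the sign-extended 32-bit
    displacement path, including 64-bit mode where that is the default. */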
7266 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7267 {
7268 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7270
7271 IEM_MC_BEGIN(0, 0);
7272 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7273 IEM_MC_REL_JMP_S16(i16Imm);
7274 } IEM_MC_ELSE() {
7275 IEM_MC_ADVANCE_RIP();
7276 } IEM_MC_ENDIF();
7277 IEM_MC_END();
7278 }
7279 else
7280 {
7281 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7283
7284 IEM_MC_BEGIN(0, 0);
7285 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7286 IEM_MC_REL_JMP_S32(i32Imm);
7287 } IEM_MC_ELSE() {
7288 IEM_MC_ADVANCE_RIP();
7289 } IEM_MC_ENDIF();
7290 IEM_MC_END();
7291 }
7292 return VINF_SUCCESS;
7293}
7294
7295
7296/** Opcode 0x0f 0x81. */
7297FNIEMOP_DEF(iemOp_jno_Jv)
7298{
7299 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7300 IEMOP_HLP_MIN_386();
7301 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7302 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7303 {
7304 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7306
7307 IEM_MC_BEGIN(0, 0);
7308 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7309 IEM_MC_ADVANCE_RIP();
7310 } IEM_MC_ELSE() {
7311 IEM_MC_REL_JMP_S16(i16Imm);
7312 } IEM_MC_ENDIF();
7313 IEM_MC_END();
7314 }
7315 else
7316 {
7317 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7319
7320 IEM_MC_BEGIN(0, 0);
7321 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7322 IEM_MC_ADVANCE_RIP();
7323 } IEM_MC_ELSE() {
7324 IEM_MC_REL_JMP_S32(i32Imm);
7325 } IEM_MC_ENDIF();
7326 IEM_MC_END();
7327 }
7328 return VINF_SUCCESS;
7329}
7330
7331
7332/** Opcode 0x0f 0x82. */
7333FNIEMOP_DEF(iemOp_jc_Jv)
7334{
7335 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7336 IEMOP_HLP_MIN_386();
7337 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7338 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7339 {
7340 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7342
7343 IEM_MC_BEGIN(0, 0);
7344 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7345 IEM_MC_REL_JMP_S16(i16Imm);
7346 } IEM_MC_ELSE() {
7347 IEM_MC_ADVANCE_RIP();
7348 } IEM_MC_ENDIF();
7349 IEM_MC_END();
7350 }
7351 else
7352 {
7353 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7355
7356 IEM_MC_BEGIN(0, 0);
7357 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7358 IEM_MC_REL_JMP_S32(i32Imm);
7359 } IEM_MC_ELSE() {
7360 IEM_MC_ADVANCE_RIP();
7361 } IEM_MC_ENDIF();
7362 IEM_MC_END();
7363 }
7364 return VINF_SUCCESS;
7365}
7366
7367
7368/** Opcode 0x0f 0x83. */
7369FNIEMOP_DEF(iemOp_jnc_Jv)
7370{
7371 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7372 IEMOP_HLP_MIN_386();
7373 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7374 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7375 {
7376 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7378
7379 IEM_MC_BEGIN(0, 0);
7380 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7381 IEM_MC_ADVANCE_RIP();
7382 } IEM_MC_ELSE() {
7383 IEM_MC_REL_JMP_S16(i16Imm);
7384 } IEM_MC_ENDIF();
7385 IEM_MC_END();
7386 }
7387 else
7388 {
7389 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7391
7392 IEM_MC_BEGIN(0, 0);
7393 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7394 IEM_MC_ADVANCE_RIP();
7395 } IEM_MC_ELSE() {
7396 IEM_MC_REL_JMP_S32(i32Imm);
7397 } IEM_MC_ENDIF();
7398 IEM_MC_END();
7399 }
7400 return VINF_SUCCESS;
7401}
7402
7403
7404/** Opcode 0x0f 0x84. */
7405FNIEMOP_DEF(iemOp_je_Jv)
7406{
7407 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7408 IEMOP_HLP_MIN_386();
7409 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7410 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7411 {
7412 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7414
7415 IEM_MC_BEGIN(0, 0);
7416 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7417 IEM_MC_REL_JMP_S16(i16Imm);
7418 } IEM_MC_ELSE() {
7419 IEM_MC_ADVANCE_RIP();
7420 } IEM_MC_ENDIF();
7421 IEM_MC_END();
7422 }
7423 else
7424 {
7425 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7427
7428 IEM_MC_BEGIN(0, 0);
7429 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7430 IEM_MC_REL_JMP_S32(i32Imm);
7431 } IEM_MC_ELSE() {
7432 IEM_MC_ADVANCE_RIP();
7433 } IEM_MC_ENDIF();
7434 IEM_MC_END();
7435 }
7436 return VINF_SUCCESS;
7437}
7438
7439
7440/** Opcode 0x0f 0x85. */
7441FNIEMOP_DEF(iemOp_jne_Jv)
7442{
7443 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7444 IEMOP_HLP_MIN_386();
7445 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7446 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7447 {
7448 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7450
7451 IEM_MC_BEGIN(0, 0);
7452 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7453 IEM_MC_ADVANCE_RIP();
7454 } IEM_MC_ELSE() {
7455 IEM_MC_REL_JMP_S16(i16Imm);
7456 } IEM_MC_ENDIF();
7457 IEM_MC_END();
7458 }
7459 else
7460 {
7461 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7463
7464 IEM_MC_BEGIN(0, 0);
7465 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7466 IEM_MC_ADVANCE_RIP();
7467 } IEM_MC_ELSE() {
7468 IEM_MC_REL_JMP_S32(i32Imm);
7469 } IEM_MC_ENDIF();
7470 IEM_MC_END();
7471 }
7472 return VINF_SUCCESS;
7473}
7474
7475
7476/** Opcode 0x0f 0x86. */
7477FNIEMOP_DEF(iemOp_jbe_Jv)
7478{
7479 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7480 IEMOP_HLP_MIN_386();
7481 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7482 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7483 {
7484 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7486
7487 IEM_MC_BEGIN(0, 0);
7488 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7489 IEM_MC_REL_JMP_S16(i16Imm);
7490 } IEM_MC_ELSE() {
7491 IEM_MC_ADVANCE_RIP();
7492 } IEM_MC_ENDIF();
7493 IEM_MC_END();
7494 }
7495 else
7496 {
7497 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7499
7500 IEM_MC_BEGIN(0, 0);
7501 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7502 IEM_MC_REL_JMP_S32(i32Imm);
7503 } IEM_MC_ELSE() {
7504 IEM_MC_ADVANCE_RIP();
7505 } IEM_MC_ENDIF();
7506 IEM_MC_END();
7507 }
7508 return VINF_SUCCESS;
7509}
7510
7511
7512/** Opcode 0x0f 0x87. */
7513FNIEMOP_DEF(iemOp_jnbe_Jv)
7514{
7515 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7516 IEMOP_HLP_MIN_386();
7517 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7518 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7519 {
7520 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7522
7523 IEM_MC_BEGIN(0, 0);
7524 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7525 IEM_MC_ADVANCE_RIP();
7526 } IEM_MC_ELSE() {
7527 IEM_MC_REL_JMP_S16(i16Imm);
7528 } IEM_MC_ENDIF();
7529 IEM_MC_END();
7530 }
7531 else
7532 {
7533 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7535
7536 IEM_MC_BEGIN(0, 0);
7537 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7538 IEM_MC_ADVANCE_RIP();
7539 } IEM_MC_ELSE() {
7540 IEM_MC_REL_JMP_S32(i32Imm);
7541 } IEM_MC_ENDIF();
7542 IEM_MC_END();
7543 }
7544 return VINF_SUCCESS;
7545}
7546
7547
7548/** Opcode 0x0f 0x88. */
7549FNIEMOP_DEF(iemOp_js_Jv)
7550{
7551 IEMOP_MNEMONIC(js_Jv, "js Jv");
7552 IEMOP_HLP_MIN_386();
7553 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7554 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7555 {
7556 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7558
7559 IEM_MC_BEGIN(0, 0);
7560 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7561 IEM_MC_REL_JMP_S16(i16Imm);
7562 } IEM_MC_ELSE() {
7563 IEM_MC_ADVANCE_RIP();
7564 } IEM_MC_ENDIF();
7565 IEM_MC_END();
7566 }
7567 else
7568 {
7569 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7571
7572 IEM_MC_BEGIN(0, 0);
7573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7574 IEM_MC_REL_JMP_S32(i32Imm);
7575 } IEM_MC_ELSE() {
7576 IEM_MC_ADVANCE_RIP();
7577 } IEM_MC_ENDIF();
7578 IEM_MC_END();
7579 }
7580 return VINF_SUCCESS;
7581}
7582
7583
7584/** Opcode 0x0f 0x89. */
7585FNIEMOP_DEF(iemOp_jns_Jv)
7586{
7587 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7588 IEMOP_HLP_MIN_386();
7589 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7590 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7591 {
7592 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7594
7595 IEM_MC_BEGIN(0, 0);
7596 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7597 IEM_MC_ADVANCE_RIP();
7598 } IEM_MC_ELSE() {
7599 IEM_MC_REL_JMP_S16(i16Imm);
7600 } IEM_MC_ENDIF();
7601 IEM_MC_END();
7602 }
7603 else
7604 {
7605 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7607
7608 IEM_MC_BEGIN(0, 0);
7609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7610 IEM_MC_ADVANCE_RIP();
7611 } IEM_MC_ELSE() {
7612 IEM_MC_REL_JMP_S32(i32Imm);
7613 } IEM_MC_ENDIF();
7614 IEM_MC_END();
7615 }
7616 return VINF_SUCCESS;
7617}
7618
7619
7620/** Opcode 0x0f 0x8a. */
7621FNIEMOP_DEF(iemOp_jp_Jv)
7622{
7623 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7624 IEMOP_HLP_MIN_386();
7625 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7626 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7627 {
7628 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7630
7631 IEM_MC_BEGIN(0, 0);
7632 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7633 IEM_MC_REL_JMP_S16(i16Imm);
7634 } IEM_MC_ELSE() {
7635 IEM_MC_ADVANCE_RIP();
7636 } IEM_MC_ENDIF();
7637 IEM_MC_END();
7638 }
7639 else
7640 {
7641 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7643
7644 IEM_MC_BEGIN(0, 0);
7645 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7646 IEM_MC_REL_JMP_S32(i32Imm);
7647 } IEM_MC_ELSE() {
7648 IEM_MC_ADVANCE_RIP();
7649 } IEM_MC_ENDIF();
7650 IEM_MC_END();
7651 }
7652 return VINF_SUCCESS;
7653}
7654
7655
7656/** Opcode 0x0f 0x8b. */
7657FNIEMOP_DEF(iemOp_jnp_Jv)
7658{
7659 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7660 IEMOP_HLP_MIN_386();
7661 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7662 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7663 {
7664 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7666
7667 IEM_MC_BEGIN(0, 0);
7668 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7669 IEM_MC_ADVANCE_RIP();
7670 } IEM_MC_ELSE() {
7671 IEM_MC_REL_JMP_S16(i16Imm);
7672 } IEM_MC_ENDIF();
7673 IEM_MC_END();
7674 }
7675 else
7676 {
7677 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7679
7680 IEM_MC_BEGIN(0, 0);
7681 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7682 IEM_MC_ADVANCE_RIP();
7683 } IEM_MC_ELSE() {
7684 IEM_MC_REL_JMP_S32(i32Imm);
7685 } IEM_MC_ENDIF();
7686 IEM_MC_END();
7687 }
7688 return VINF_SUCCESS;
7689}
7690
7691
7692/** Opcode 0x0f 0x8c. */
7693FNIEMOP_DEF(iemOp_jl_Jv)
7694{
7695 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7696 IEMOP_HLP_MIN_386();
7697 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7698 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7699 {
7700 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7702
7703 IEM_MC_BEGIN(0, 0);
7704 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7705 IEM_MC_REL_JMP_S16(i16Imm);
7706 } IEM_MC_ELSE() {
7707 IEM_MC_ADVANCE_RIP();
7708 } IEM_MC_ENDIF();
7709 IEM_MC_END();
7710 }
7711 else
7712 {
7713 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7715
7716 IEM_MC_BEGIN(0, 0);
7717 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7718 IEM_MC_REL_JMP_S32(i32Imm);
7719 } IEM_MC_ELSE() {
7720 IEM_MC_ADVANCE_RIP();
7721 } IEM_MC_ENDIF();
7722 IEM_MC_END();
7723 }
7724 return VINF_SUCCESS;
7725}
7726
7727
7728/** Opcode 0x0f 0x8d. */
7729FNIEMOP_DEF(iemOp_jnl_Jv)
7730{
7731 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
7732 IEMOP_HLP_MIN_386();
7733 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7734 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7735 {
7736 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7738
7739 IEM_MC_BEGIN(0, 0);
7740 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7741 IEM_MC_ADVANCE_RIP();
7742 } IEM_MC_ELSE() {
7743 IEM_MC_REL_JMP_S16(i16Imm);
7744 } IEM_MC_ENDIF();
7745 IEM_MC_END();
7746 }
7747 else
7748 {
7749 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7751
7752 IEM_MC_BEGIN(0, 0);
7753 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7754 IEM_MC_ADVANCE_RIP();
7755 } IEM_MC_ELSE() {
7756 IEM_MC_REL_JMP_S32(i32Imm);
7757 } IEM_MC_ENDIF();
7758 IEM_MC_END();
7759 }
7760 return VINF_SUCCESS;
7761}
7762
7763
7764/** Opcode 0x0f 0x8e. */
7765FNIEMOP_DEF(iemOp_jle_Jv)
7766{
7767 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
7768 IEMOP_HLP_MIN_386();
7769 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7770 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7771 {
7772 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7774
7775 IEM_MC_BEGIN(0, 0);
7776 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7777 IEM_MC_REL_JMP_S16(i16Imm);
7778 } IEM_MC_ELSE() {
7779 IEM_MC_ADVANCE_RIP();
7780 } IEM_MC_ENDIF();
7781 IEM_MC_END();
7782 }
7783 else
7784 {
7785 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7787
7788 IEM_MC_BEGIN(0, 0);
7789 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7790 IEM_MC_REL_JMP_S32(i32Imm);
7791 } IEM_MC_ELSE() {
7792 IEM_MC_ADVANCE_RIP();
7793 } IEM_MC_ENDIF();
7794 IEM_MC_END();
7795 }
7796 return VINF_SUCCESS;
7797}
7798
7799
7800/** Opcode 0x0f 0x8f. */
7801FNIEMOP_DEF(iemOp_jnle_Jv)
7802{
7803 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
7804 IEMOP_HLP_MIN_386();
7805 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7806 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7807 {
7808 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7810
7811 IEM_MC_BEGIN(0, 0);
7812 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7813 IEM_MC_ADVANCE_RIP();
7814 } IEM_MC_ELSE() {
7815 IEM_MC_REL_JMP_S16(i16Imm);
7816 } IEM_MC_ENDIF();
7817 IEM_MC_END();
7818 }
7819 else
7820 {
7821 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7823
7824 IEM_MC_BEGIN(0, 0);
7825 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7826 IEM_MC_ADVANCE_RIP();
7827 } IEM_MC_ELSE() {
7828 IEM_MC_REL_JMP_S32(i32Imm);
7829 } IEM_MC_ENDIF();
7830 IEM_MC_END();
7831 }
7832 return VINF_SUCCESS;
7833}
7834
7835
7836/** Opcode 0x0f 0x90. */
7837FNIEMOP_DEF(iemOp_seto_Eb)
7838{
7839 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
7840 IEMOP_HLP_MIN_386();
7841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7842
7843 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7844 * any way. AMD says it's "unused", whatever that means. We're
7845 * ignoring it for now. */
7846 if (IEM_IS_MODRM_REG_MODE(bRm))
7847 {
7848 /* register target */
7849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7850 IEM_MC_BEGIN(0, 0);
7851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7852 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7853 } IEM_MC_ELSE() {
7854 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7855 } IEM_MC_ENDIF();
7856 IEM_MC_ADVANCE_RIP();
7857 IEM_MC_END();
7858 }
7859 else
7860 {
7861 /* memory target */
7862 IEM_MC_BEGIN(0, 1);
7863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7866 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7867 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7868 } IEM_MC_ELSE() {
7869 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7870 } IEM_MC_ENDIF();
7871 IEM_MC_ADVANCE_RIP();
7872 IEM_MC_END();
7873 }
7874 return VINF_SUCCESS;
7875}
7876
7877
7878/** Opcode 0x0f 0x91. */
7879FNIEMOP_DEF(iemOp_setno_Eb)
7880{
7881 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
7882 IEMOP_HLP_MIN_386();
7883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7884
7885 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7886 * any way. AMD says it's "unused", whatever that means. We're
7887 * ignoring it for now. */
7888 if (IEM_IS_MODRM_REG_MODE(bRm))
7889 {
7890 /* register target */
7891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7892 IEM_MC_BEGIN(0, 0);
7893 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7894 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7895 } IEM_MC_ELSE() {
7896 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7897 } IEM_MC_ENDIF();
7898 IEM_MC_ADVANCE_RIP();
7899 IEM_MC_END();
7900 }
7901 else
7902 {
7903 /* memory target */
7904 IEM_MC_BEGIN(0, 1);
7905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7908 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7909 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7910 } IEM_MC_ELSE() {
7911 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7912 } IEM_MC_ENDIF();
7913 IEM_MC_ADVANCE_RIP();
7914 IEM_MC_END();
7915 }
7916 return VINF_SUCCESS;
7917}
7918
7919
7920/** Opcode 0x0f 0x92. */
7921FNIEMOP_DEF(iemOp_setc_Eb)
7922{
7923 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
7924 IEMOP_HLP_MIN_386();
7925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7926
7927 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7928 * any way. AMD says it's "unused", whatever that means. We're
7929 * ignoring it for now. */
7930 if (IEM_IS_MODRM_REG_MODE(bRm))
7931 {
7932 /* register target */
7933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7934 IEM_MC_BEGIN(0, 0);
7935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7936 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7937 } IEM_MC_ELSE() {
7938 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7939 } IEM_MC_ENDIF();
7940 IEM_MC_ADVANCE_RIP();
7941 IEM_MC_END();
7942 }
7943 else
7944 {
7945 /* memory target */
7946 IEM_MC_BEGIN(0, 1);
7947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7950 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7951 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7952 } IEM_MC_ELSE() {
7953 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7954 } IEM_MC_ENDIF();
7955 IEM_MC_ADVANCE_RIP();
7956 IEM_MC_END();
7957 }
7958 return VINF_SUCCESS;
7959}
7960
7961
7962/** Opcode 0x0f 0x93. */
7963FNIEMOP_DEF(iemOp_setnc_Eb)
7964{
7965 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
7966 IEMOP_HLP_MIN_386();
7967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7968
7969 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7970 * any way. AMD says it's "unused", whatever that means. We're
7971 * ignoring it for now. */
7972 if (IEM_IS_MODRM_REG_MODE(bRm))
7973 {
7974 /* register target */
7975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7976 IEM_MC_BEGIN(0, 0);
7977 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7978 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7979 } IEM_MC_ELSE() {
7980 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7981 } IEM_MC_ENDIF();
7982 IEM_MC_ADVANCE_RIP();
7983 IEM_MC_END();
7984 }
7985 else
7986 {
7987 /* memory target */
7988 IEM_MC_BEGIN(0, 1);
7989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7992 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7993 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7994 } IEM_MC_ELSE() {
7995 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7996 } IEM_MC_ENDIF();
7997 IEM_MC_ADVANCE_RIP();
7998 IEM_MC_END();
7999 }
8000 return VINF_SUCCESS;
8001}
8002
8003
8004/** Opcode 0x0f 0x94. */
8005FNIEMOP_DEF(iemOp_sete_Eb)
8006{
8007 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8008 IEMOP_HLP_MIN_386();
8009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8010
8011 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8012 * any way. AMD says it's "unused", whatever that means. We're
8013 * ignoring it for now. */
8014 if (IEM_IS_MODRM_REG_MODE(bRm))
8015 {
8016 /* register target */
8017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8018 IEM_MC_BEGIN(0, 0);
8019 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8020 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8021 } IEM_MC_ELSE() {
8022 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8023 } IEM_MC_ENDIF();
8024 IEM_MC_ADVANCE_RIP();
8025 IEM_MC_END();
8026 }
8027 else
8028 {
8029 /* memory target */
8030 IEM_MC_BEGIN(0, 1);
8031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8034 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8035 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8036 } IEM_MC_ELSE() {
8037 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8038 } IEM_MC_ENDIF();
8039 IEM_MC_ADVANCE_RIP();
8040 IEM_MC_END();
8041 }
8042 return VINF_SUCCESS;
8043}
8044
8045
8046/** Opcode 0x0f 0x95. */
8047FNIEMOP_DEF(iemOp_setne_Eb)
8048{
8049 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8050 IEMOP_HLP_MIN_386();
8051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8052
8053 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8054 * any way. AMD says it's "unused", whatever that means. We're
8055 * ignoring it for now. */
8056 if (IEM_IS_MODRM_REG_MODE(bRm))
8057 {
8058 /* register target */
8059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8060 IEM_MC_BEGIN(0, 0);
8061 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8062 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8063 } IEM_MC_ELSE() {
8064 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8065 } IEM_MC_ENDIF();
8066 IEM_MC_ADVANCE_RIP();
8067 IEM_MC_END();
8068 }
8069 else
8070 {
8071 /* memory target */
8072 IEM_MC_BEGIN(0, 1);
8073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8076 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8077 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8078 } IEM_MC_ELSE() {
8079 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8080 } IEM_MC_ENDIF();
8081 IEM_MC_ADVANCE_RIP();
8082 IEM_MC_END();
8083 }
8084 return VINF_SUCCESS;
8085}
8086
8087
8088/** Opcode 0x0f 0x96. */
8089FNIEMOP_DEF(iemOp_setbe_Eb)
8090{
8091 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8092 IEMOP_HLP_MIN_386();
8093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8094
8095 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8096 * any way. AMD says it's "unused", whatever that means. We're
8097 * ignoring it for now. */
8098 if (IEM_IS_MODRM_REG_MODE(bRm))
8099 {
8100 /* register target */
8101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8102 IEM_MC_BEGIN(0, 0);
8103 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8104 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8105 } IEM_MC_ELSE() {
8106 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8107 } IEM_MC_ENDIF();
8108 IEM_MC_ADVANCE_RIP();
8109 IEM_MC_END();
8110 }
8111 else
8112 {
8113 /* memory target */
8114 IEM_MC_BEGIN(0, 1);
8115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8118 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8119 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8120 } IEM_MC_ELSE() {
8121 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8122 } IEM_MC_ENDIF();
8123 IEM_MC_ADVANCE_RIP();
8124 IEM_MC_END();
8125 }
8126 return VINF_SUCCESS;
8127}
8128
8129
8130/** Opcode 0x0f 0x97. */
8131FNIEMOP_DEF(iemOp_setnbe_Eb)
8132{
8133 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8134 IEMOP_HLP_MIN_386();
8135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8136
8137 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8138 * any way. AMD says it's "unused", whatever that means. We're
8139 * ignoring it for now. */
8140 if (IEM_IS_MODRM_REG_MODE(bRm))
8141 {
8142 /* register target */
8143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8144 IEM_MC_BEGIN(0, 0);
8145 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8146 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8147 } IEM_MC_ELSE() {
8148 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8149 } IEM_MC_ENDIF();
8150 IEM_MC_ADVANCE_RIP();
8151 IEM_MC_END();
8152 }
8153 else
8154 {
8155 /* memory target */
8156 IEM_MC_BEGIN(0, 1);
8157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8160 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8161 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8162 } IEM_MC_ELSE() {
8163 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8164 } IEM_MC_ENDIF();
8165 IEM_MC_ADVANCE_RIP();
8166 IEM_MC_END();
8167 }
8168 return VINF_SUCCESS;
8169}
8170
8171
8172/** Opcode 0x0f 0x98. */
8173FNIEMOP_DEF(iemOp_sets_Eb)
8174{
8175 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8176 IEMOP_HLP_MIN_386();
8177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8178
8179 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8180 * any way. AMD says it's "unused", whatever that means. We're
8181 * ignoring it for now. */
8182 if (IEM_IS_MODRM_REG_MODE(bRm))
8183 {
8184 /* register target */
8185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8186 IEM_MC_BEGIN(0, 0);
8187 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8188 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8189 } IEM_MC_ELSE() {
8190 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8191 } IEM_MC_ENDIF();
8192 IEM_MC_ADVANCE_RIP();
8193 IEM_MC_END();
8194 }
8195 else
8196 {
8197 /* memory target */
8198 IEM_MC_BEGIN(0, 1);
8199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8202 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8203 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8204 } IEM_MC_ELSE() {
8205 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8206 } IEM_MC_ENDIF();
8207 IEM_MC_ADVANCE_RIP();
8208 IEM_MC_END();
8209 }
8210 return VINF_SUCCESS;
8211}
8212
8213
8214/** Opcode 0x0f 0x99. */
8215FNIEMOP_DEF(iemOp_setns_Eb)
8216{
8217 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8218 IEMOP_HLP_MIN_386();
8219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8220
8221 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8222 * any way. AMD says it's "unused", whatever that means. We're
8223 * ignoring it for now. */
8224 if (IEM_IS_MODRM_REG_MODE(bRm))
8225 {
8226 /* register target */
8227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8228 IEM_MC_BEGIN(0, 0);
8229 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8230 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8231 } IEM_MC_ELSE() {
8232 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8233 } IEM_MC_ENDIF();
8234 IEM_MC_ADVANCE_RIP();
8235 IEM_MC_END();
8236 }
8237 else
8238 {
8239 /* memory target */
8240 IEM_MC_BEGIN(0, 1);
8241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8244 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8245 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8246 } IEM_MC_ELSE() {
8247 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8248 } IEM_MC_ENDIF();
8249 IEM_MC_ADVANCE_RIP();
8250 IEM_MC_END();
8251 }
8252 return VINF_SUCCESS;
8253}
8254
8255
8256/** Opcode 0x0f 0x9a. */
8257FNIEMOP_DEF(iemOp_setp_Eb)
8258{
8259 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8260 IEMOP_HLP_MIN_386();
8261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8262
8263 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8264 * any way. AMD says it's "unused", whatever that means. We're
8265 * ignoring it for now. */
8266 if (IEM_IS_MODRM_REG_MODE(bRm))
8267 {
8268 /* register target */
8269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8270 IEM_MC_BEGIN(0, 0);
8271 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8272 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8273 } IEM_MC_ELSE() {
8274 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8275 } IEM_MC_ENDIF();
8276 IEM_MC_ADVANCE_RIP();
8277 IEM_MC_END();
8278 }
8279 else
8280 {
8281 /* memory target */
8282 IEM_MC_BEGIN(0, 1);
8283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8286 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8287 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8288 } IEM_MC_ELSE() {
8289 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8290 } IEM_MC_ENDIF();
8291 IEM_MC_ADVANCE_RIP();
8292 IEM_MC_END();
8293 }
8294 return VINF_SUCCESS;
8295}
8296
8297
8298/** Opcode 0x0f 0x9b. */
8299FNIEMOP_DEF(iemOp_setnp_Eb)
8300{
8301 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8302 IEMOP_HLP_MIN_386();
8303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8304
8305 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8306 * any way. AMD says it's "unused", whatever that means. We're
8307 * ignoring it for now. */
8308 if (IEM_IS_MODRM_REG_MODE(bRm))
8309 {
8310 /* register target */
8311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8312 IEM_MC_BEGIN(0, 0);
8313 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8314 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8315 } IEM_MC_ELSE() {
8316 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8317 } IEM_MC_ENDIF();
8318 IEM_MC_ADVANCE_RIP();
8319 IEM_MC_END();
8320 }
8321 else
8322 {
8323 /* memory target */
8324 IEM_MC_BEGIN(0, 1);
8325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8328 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8329 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8330 } IEM_MC_ELSE() {
8331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8332 } IEM_MC_ENDIF();
8333 IEM_MC_ADVANCE_RIP();
8334 IEM_MC_END();
8335 }
8336 return VINF_SUCCESS;
8337}
8338
8339
8340/** Opcode 0x0f 0x9c. */
8341FNIEMOP_DEF(iemOp_setl_Eb)
8342{
8343 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8344 IEMOP_HLP_MIN_386();
8345 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8346
8347 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8348 * any way. AMD says it's "unused", whatever that means. We're
8349 * ignoring it for now. */
8350 if (IEM_IS_MODRM_REG_MODE(bRm))
8351 {
8352 /* register target */
8353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8354 IEM_MC_BEGIN(0, 0);
8355 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8356 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8357 } IEM_MC_ELSE() {
8358 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8359 } IEM_MC_ENDIF();
8360 IEM_MC_ADVANCE_RIP();
8361 IEM_MC_END();
8362 }
8363 else
8364 {
8365 /* memory target */
8366 IEM_MC_BEGIN(0, 1);
8367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8370 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8371 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8372 } IEM_MC_ELSE() {
8373 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8374 } IEM_MC_ENDIF();
8375 IEM_MC_ADVANCE_RIP();
8376 IEM_MC_END();
8377 }
8378 return VINF_SUCCESS;
8379}
8380
8381
8382/** Opcode 0x0f 0x9d. */
8383FNIEMOP_DEF(iemOp_setnl_Eb)
8384{
8385 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8386 IEMOP_HLP_MIN_386();
8387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8388
8389 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8390 * any way. AMD says it's "unused", whatever that means. We're
8391 * ignoring it for now. */
8392 if (IEM_IS_MODRM_REG_MODE(bRm))
8393 {
8394 /* register target */
8395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8396 IEM_MC_BEGIN(0, 0);
8397 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8398 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8399 } IEM_MC_ELSE() {
8400 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8401 } IEM_MC_ENDIF();
8402 IEM_MC_ADVANCE_RIP();
8403 IEM_MC_END();
8404 }
8405 else
8406 {
8407 /* memory target */
8408 IEM_MC_BEGIN(0, 1);
8409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8412 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8413 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8414 } IEM_MC_ELSE() {
8415 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8416 } IEM_MC_ENDIF();
8417 IEM_MC_ADVANCE_RIP();
8418 IEM_MC_END();
8419 }
8420 return VINF_SUCCESS;
8421}
8422
8423
8424/** Opcode 0x0f 0x9e. */
8425FNIEMOP_DEF(iemOp_setle_Eb)
8426{
8427 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8428 IEMOP_HLP_MIN_386();
8429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8430
8431 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8432 * any way. AMD says it's "unused", whatever that means. We're
8433 * ignoring it for now. */
8434 if (IEM_IS_MODRM_REG_MODE(bRm))
8435 {
8436 /* register target */
8437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8438 IEM_MC_BEGIN(0, 0);
8439 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8440 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8441 } IEM_MC_ELSE() {
8442 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8443 } IEM_MC_ENDIF();
8444 IEM_MC_ADVANCE_RIP();
8445 IEM_MC_END();
8446 }
8447 else
8448 {
8449 /* memory target */
8450 IEM_MC_BEGIN(0, 1);
8451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8454 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8455 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8456 } IEM_MC_ELSE() {
8457 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8458 } IEM_MC_ENDIF();
8459 IEM_MC_ADVANCE_RIP();
8460 IEM_MC_END();
8461 }
8462 return VINF_SUCCESS;
8463}
8464
8465
8466/** Opcode 0x0f 0x9f. */
8467FNIEMOP_DEF(iemOp_setnle_Eb)
8468{
8469 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8470 IEMOP_HLP_MIN_386();
8471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8472
8473 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8474 * any way. AMD says it's "unused", whatever that means. We're
8475 * ignoring it for now. */
8476 if (IEM_IS_MODRM_REG_MODE(bRm))
8477 {
8478 /* register target */
8479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8480 IEM_MC_BEGIN(0, 0);
8481 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8482 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8483 } IEM_MC_ELSE() {
8484 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8485 } IEM_MC_ENDIF();
8486 IEM_MC_ADVANCE_RIP();
8487 IEM_MC_END();
8488 }
8489 else
8490 {
8491 /* memory target */
8492 IEM_MC_BEGIN(0, 1);
8493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8496 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8497 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8498 } IEM_MC_ELSE() {
8499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8500 } IEM_MC_ENDIF();
8501 IEM_MC_ADVANCE_RIP();
8502 IEM_MC_END();
8503 }
8504 return VINF_SUCCESS;
8505}
8506
8507
8508/**
8509 * Common 'push segment-register' helper.
8510 */
8511FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8512{
8513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8514 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
8515 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8516
8517 switch (pVCpu->iem.s.enmEffOpSize)
8518 {
8519 case IEMMODE_16BIT:
8520 IEM_MC_BEGIN(0, 1);
8521 IEM_MC_LOCAL(uint16_t, u16Value);
8522 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8523 IEM_MC_PUSH_U16(u16Value);
8524 IEM_MC_ADVANCE_RIP();
8525 IEM_MC_END();
8526 break;
8527
8528 case IEMMODE_32BIT:
8529 IEM_MC_BEGIN(0, 1);
8530 IEM_MC_LOCAL(uint32_t, u32Value);
8531 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
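 /* Note: a dedicated push worker is used because real CPUs (at least some of
    them) write only the low 16 bits of the stack slot when pushing a segment
    register with a 32-bit operand size, leaving the upper half untouched. */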
8532 IEM_MC_PUSH_U32_SREG(u32Value);
8533 IEM_MC_ADVANCE_RIP();
8534 IEM_MC_END();
8535 break;
8536
8537 case IEMMODE_64BIT:
8538 IEM_MC_BEGIN(0, 1);
8539 IEM_MC_LOCAL(uint64_t, u64Value);
8540 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8541 IEM_MC_PUSH_U64(u64Value);
8542 IEM_MC_ADVANCE_RIP();
8543 IEM_MC_END();
8544 break;
8545 }
8546
8547 return VINF_SUCCESS;
8548}
8549
8550
8551/** Opcode 0x0f 0xa0. */
8552FNIEMOP_DEF(iemOp_push_fs)
8553{
8554 IEMOP_MNEMONIC(push_fs, "push fs");
8555 IEMOP_HLP_MIN_386();
8556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8557 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8558}
8559
8560
8561/** Opcode 0x0f 0xa1. */
8562FNIEMOP_DEF(iemOp_pop_fs)
8563{
8564 IEMOP_MNEMONIC(pop_fs, "pop fs");
8565 IEMOP_HLP_MIN_386();
8566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
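 /* Loading a segment register involves descriptor-table access and a number
    of fault checks, so the work is deferred to a C implementation worker. */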
8567 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8568}
8569
8570
8571/** Opcode 0x0f 0xa2. */
8572FNIEMOP_DEF(iemOp_cpuid)
8573{
8574 IEMOP_MNEMONIC(cpuid, "cpuid");
8575 IEMOP_HLP_MIN_486(); /* not all 486 models have CPUID. */
8576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8577 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
8578}
8579
8580
8581/**
8582 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8583 * iemOp_bts_Ev_Gv.
8584 */
8585FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
8586{
8587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8588 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8589
8590 if (IEM_IS_MODRM_REG_MODE(bRm))
8591 {
8592 /* register destination. */
8593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
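 /* For a register destination the bit offset in Gv wraps modulo the operand
    width; that is what the 0xf/0x1f/0x3f masking below implements. */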
8594 switch (pVCpu->iem.s.enmEffOpSize)
8595 {
8596 case IEMMODE_16BIT:
8597 IEM_MC_BEGIN(3, 0);
8598 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8599 IEM_MC_ARG(uint16_t, u16Src, 1);
8600 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8601
8602 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8603 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
8604 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8605 IEM_MC_REF_EFLAGS(pEFlags);
8606 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8607
8608 IEM_MC_ADVANCE_RIP();
8609 IEM_MC_END();
8610 return VINF_SUCCESS;
8611
8612 case IEMMODE_32BIT:
8613 IEM_MC_BEGIN(3, 0);
8614 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8615 IEM_MC_ARG(uint32_t, u32Src, 1);
8616 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8617
8618 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8619 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
8620 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8621 IEM_MC_REF_EFLAGS(pEFlags);
8622 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8623
8624 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8625 IEM_MC_ADVANCE_RIP();
8626 IEM_MC_END();
8627 return VINF_SUCCESS;
8628
8629 case IEMMODE_64BIT:
8630 IEM_MC_BEGIN(3, 0);
8631 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8632 IEM_MC_ARG(uint64_t, u64Src, 1);
8633 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8634
8635 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8636 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
8637 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8638 IEM_MC_REF_EFLAGS(pEFlags);
8639 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8640
8641 IEM_MC_ADVANCE_RIP();
8642 IEM_MC_END();
8643 return VINF_SUCCESS;
8644
8645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8646 }
8647 }
8648 else
8649 {
8650 /* memory destination. */
8651
8652 uint32_t fAccess;
8653 if (pImpl->pfnLockedU16)
8654 fAccess = IEM_ACCESS_DATA_RW;
8655 else /* BT */
8656 fAccess = IEM_ACCESS_DATA_R;
8657
8658 /** @todo test negative bit offsets! */
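 /* For a memory destination the bit offset is a signed quantity that may pick
    a bit outside the operand at the effective address.  It is therefore split
    into a signed element index (the arithmetic right shift by 4/5/6) which is
    scaled to a byte offset and added to the effective address, and a
    bit-within-element part (the 0xf/0x1f/0x3f mask) passed to the worker. */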
8659 switch (pVCpu->iem.s.enmEffOpSize)
8660 {
8661 case IEMMODE_16BIT:
8662 IEM_MC_BEGIN(3, 2);
8663 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8664 IEM_MC_ARG(uint16_t, u16Src, 1);
8665 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8667 IEM_MC_LOCAL(int16_t, i16AddrAdj);
8668
8669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8670 if (pImpl->pfnLockedU16)
8671 IEMOP_HLP_DONE_DECODING();
8672 else
8673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8674 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8675 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
8676 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
8677 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
8678 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
8679 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
8680 IEM_MC_FETCH_EFLAGS(EFlags);
8681
8682 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8683 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8684 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8685 else
8686 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8687 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8688
8689 IEM_MC_COMMIT_EFLAGS(EFlags);
8690 IEM_MC_ADVANCE_RIP();
8691 IEM_MC_END();
8692 return VINF_SUCCESS;
8693
8694 case IEMMODE_32BIT:
8695 IEM_MC_BEGIN(3, 2);
8696 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8697 IEM_MC_ARG(uint32_t, u32Src, 1);
8698 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8700 IEM_MC_LOCAL(int32_t, i32AddrAdj);
8701
8702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8703 if (pImpl->pfnLockedU16)
8704 IEMOP_HLP_DONE_DECODING();
8705 else
8706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8707 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8708 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
8709 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
8710 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
8711 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
8712 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
8713 IEM_MC_FETCH_EFLAGS(EFlags);
8714
8715 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8716 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8717 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8718 else
8719 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8720 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8721
8722 IEM_MC_COMMIT_EFLAGS(EFlags);
8723 IEM_MC_ADVANCE_RIP();
8724 IEM_MC_END();
8725 return VINF_SUCCESS;
8726
8727 case IEMMODE_64BIT:
8728 IEM_MC_BEGIN(3, 2);
8729 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8730 IEM_MC_ARG(uint64_t, u64Src, 1);
8731 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8733 IEM_MC_LOCAL(int64_t, i64AddrAdj);
8734
8735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8736 if (pImpl->pfnLockedU16)
8737 IEMOP_HLP_DONE_DECODING();
8738 else
8739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8740 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8741 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
8742 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
8743 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
8744 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
8745 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
8746 IEM_MC_FETCH_EFLAGS(EFlags);
8747
8748 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8749 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8750 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8751 else
8752 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8753 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8754
8755 IEM_MC_COMMIT_EFLAGS(EFlags);
8756 IEM_MC_ADVANCE_RIP();
8757 IEM_MC_END();
8758 return VINF_SUCCESS;
8759
8760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8761 }
8762 }
8763}
8764
8765
8766/** Opcode 0x0f 0xa3. */
8767FNIEMOP_DEF(iemOp_bt_Ev_Gv)
8768{
8769 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
8770 IEMOP_HLP_MIN_386();
8771 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
8772}
8773
8774
8775/**
8776 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
8777 */
8778FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
8779{
8780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8781 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
8782
8783 if (IEM_IS_MODRM_REG_MODE(bRm))
8784 {
8785 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8787
8788 switch (pVCpu->iem.s.enmEffOpSize)
8789 {
8790 case IEMMODE_16BIT:
8791 IEM_MC_BEGIN(4, 0);
8792 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8793 IEM_MC_ARG(uint16_t, u16Src, 1);
8794 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8795 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8796
8797 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8798 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8799 IEM_MC_REF_EFLAGS(pEFlags);
8800 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8801
8802 IEM_MC_ADVANCE_RIP();
8803 IEM_MC_END();
8804 return VINF_SUCCESS;
8805
8806 case IEMMODE_32BIT:
8807 IEM_MC_BEGIN(4, 0);
8808 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8809 IEM_MC_ARG(uint32_t, u32Src, 1);
8810 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8811 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8812
8813 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8814 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8815 IEM_MC_REF_EFLAGS(pEFlags);
8816 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8817
8818 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8819 IEM_MC_ADVANCE_RIP();
8820 IEM_MC_END();
8821 return VINF_SUCCESS;
8822
8823 case IEMMODE_64BIT:
8824 IEM_MC_BEGIN(4, 0);
8825 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8826 IEM_MC_ARG(uint64_t, u64Src, 1);
8827 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
8828 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8829
8830 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8831 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8832 IEM_MC_REF_EFLAGS(pEFlags);
8833 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
8834
8835 IEM_MC_ADVANCE_RIP();
8836 IEM_MC_END();
8837 return VINF_SUCCESS;
8838
8839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8840 }
8841 }
8842 else
8843 {
8844 switch (pVCpu->iem.s.enmEffOpSize)
8845 {
8846 case IEMMODE_16BIT:
8847 IEM_MC_BEGIN(4, 2);
8848 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8849 IEM_MC_ARG(uint16_t, u16Src, 1);
8850 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8851 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8853
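 /* Note: the trailing '1' tells the effective-address calculation that one
    immediate byte follows the ModR/M encoding, which matters for RIP-relative
    operands since the immediate hasn't been fetched yet at this point. */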
8854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8855 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8856 IEM_MC_ASSIGN(cShiftArg, cShift);
8857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8858 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8859 IEM_MC_FETCH_EFLAGS(EFlags);
8860 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8861 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8862
8863 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8864 IEM_MC_COMMIT_EFLAGS(EFlags);
8865 IEM_MC_ADVANCE_RIP();
8866 IEM_MC_END();
8867 return VINF_SUCCESS;
8868
8869 case IEMMODE_32BIT:
8870 IEM_MC_BEGIN(4, 2);
8871 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8872 IEM_MC_ARG(uint32_t, u32Src, 1);
8873 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8874 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8876
8877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8878 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8879 IEM_MC_ASSIGN(cShiftArg, cShift);
8880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8881 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8882 IEM_MC_FETCH_EFLAGS(EFlags);
8883 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8884 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8885
8886 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8887 IEM_MC_COMMIT_EFLAGS(EFlags);
8888 IEM_MC_ADVANCE_RIP();
8889 IEM_MC_END();
8890 return VINF_SUCCESS;
8891
8892 case IEMMODE_64BIT:
8893 IEM_MC_BEGIN(4, 2);
8894 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8895 IEM_MC_ARG(uint64_t, u64Src, 1);
8896 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8897 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8899
8900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8901 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8902 IEM_MC_ASSIGN(cShiftArg, cShift);
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8904 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8905 IEM_MC_FETCH_EFLAGS(EFlags);
8906 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8907 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
8908
8909 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8910 IEM_MC_COMMIT_EFLAGS(EFlags);
8911 IEM_MC_ADVANCE_RIP();
8912 IEM_MC_END();
8913 return VINF_SUCCESS;
8914
8915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8916 }
8917 }
8918}
8919
8920
8921/**
8922 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
8923 */
8924FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
8925{
8926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8927 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
8928
8929 if (IEM_IS_MODRM_REG_MODE(bRm))
8930 {
8931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8932
8933 switch (pVCpu->iem.s.enmEffOpSize)
8934 {
8935 case IEMMODE_16BIT:
8936 IEM_MC_BEGIN(4, 0);
8937 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8938 IEM_MC_ARG(uint16_t, u16Src, 1);
8939 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8940 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8941
8942 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8943 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8944 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8945 IEM_MC_REF_EFLAGS(pEFlags);
8946 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
8947
8948 IEM_MC_ADVANCE_RIP();
8949 IEM_MC_END();
8950 return VINF_SUCCESS;
8951
8952 case IEMMODE_32BIT:
8953 IEM_MC_BEGIN(4, 0);
8954 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8955 IEM_MC_ARG(uint32_t, u32Src, 1);
8956 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8957 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8958
8959 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8960 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8961 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8962 IEM_MC_REF_EFLAGS(pEFlags);
8963 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
8964
8965 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8966 IEM_MC_ADVANCE_RIP();
8967 IEM_MC_END();
8968 return VINF_SUCCESS;
8969
8970 case IEMMODE_64BIT:
8971 IEM_MC_BEGIN(4, 0);
8972 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8973 IEM_MC_ARG(uint64_t, u64Src, 1);
8974 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8975 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8976
8977 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8978 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8979 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8980 IEM_MC_REF_EFLAGS(pEFlags);
8981 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
8982
8983 IEM_MC_ADVANCE_RIP();
8984 IEM_MC_END();
8985 return VINF_SUCCESS;
8986
8987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8988 }
8989 }
8990 else
8991 {
8992 switch (pVCpu->iem.s.enmEffOpSize)
8993 {
8994 case IEMMODE_16BIT:
8995 IEM_MC_BEGIN(4, 2);
8996 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8997 IEM_MC_ARG(uint16_t, u16Src, 1);
8998 IEM_MC_ARG(uint8_t, cShiftArg, 2);
8999 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9001
9002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9004 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9005 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9006 IEM_MC_FETCH_EFLAGS(EFlags);
9007 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9008 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9009
9010 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9011 IEM_MC_COMMIT_EFLAGS(EFlags);
9012 IEM_MC_ADVANCE_RIP();
9013 IEM_MC_END();
9014 return VINF_SUCCESS;
9015
9016 case IEMMODE_32BIT:
9017 IEM_MC_BEGIN(4, 2);
9018 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9019 IEM_MC_ARG(uint32_t, u32Src, 1);
9020 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9021 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9023
9024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9026 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9027 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9028 IEM_MC_FETCH_EFLAGS(EFlags);
9029 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9030 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9031
9032 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9033 IEM_MC_COMMIT_EFLAGS(EFlags);
9034 IEM_MC_ADVANCE_RIP();
9035 IEM_MC_END();
9036 return VINF_SUCCESS;
9037
9038 case IEMMODE_64BIT:
9039 IEM_MC_BEGIN(4, 2);
9040 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9041 IEM_MC_ARG(uint64_t, u64Src, 1);
9042 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9043 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9045
9046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9048 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9049 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9050 IEM_MC_FETCH_EFLAGS(EFlags);
9051 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9052 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9053
9054 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9055 IEM_MC_COMMIT_EFLAGS(EFlags);
9056 IEM_MC_ADVANCE_RIP();
9057 IEM_MC_END();
9058 return VINF_SUCCESS;
9059
9060 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9061 }
9062 }
9063}
9064
9065
9066
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}


/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}


/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}


/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xaa. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
}



/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}


/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}


/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}


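/*
 * Note: FXSAVE/FXRSTOR operate on a 512-byte memory image holding the x87,
 * MMX and SSE state (including MXCSR). The decoding below only computes the
 * effective address; the heavy lifting, including the architectural 16-byte
 * alignment check, is presumably left to the iemCImpl_fxsave and
 * iemCImpl_fxrstor workers.
 */
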
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps grp15
 * @opcode !11/2
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest op1=0 -> mxcsr=0
 * @optest op1=0x2083 -> mxcsr=0x2083
 * @optest op1=0xfffffffe -> value.xcpt=0xd
 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps grp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps grp15
 * @opcode !11/4
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps grp15
 * @opcode !11/5
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx none
 * @opcpuid clfsh
 * @opgroup og_cachectl
 * @optest op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx 0x66
 * @opcpuid clflushopt
 * @opgroup og_cachectl
 * @optest op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


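/*
 * Note: The three fences below are emulated by having an assembly helper
 * issue the corresponding fence on the host. On x86 hosts without SSE2 the
 * iemAImpl_alt_mem_fence helper is used instead (presumably a serializing
 * locked operation), while on ARM64 hosts the native helper is assumed to
 * always be available, hence the RT_ARCH_ARM64 conditionals.
 */
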
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


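/*
 * Note: RDFSBASE/RDGSBASE/WRFSBASE/WRGSBASE read and write the FS/GS base
 * address directly, even from ring 3. They are architecturally valid only in
 * 64-bit mode with CR4.FSGSBASE set; IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT is
 * expected to raise #UD otherwise. Only the operand size distinguishes the
 * 32-bit and 64-bit register forms, hence the enmEffOpSize checks below.
 */
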
/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Group 15 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                 066h,                  0f3h,                  0f2h */
    /* /0 */ iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_Grp15_rdfsbase,  iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_Grp15_rdgsbase,  iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_Grp15_wrfsbase,  iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_Grp15_wrgsbase,  iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);


/**
 * Group 15 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                 066h,                     0f3h,                 0f2h */
    /* /0 */ iemOp_Grp15_fxsave,   iemOp_InvalidWithRM,      iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,  iemOp_InvalidWithRM,      iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,  iemOp_InvalidWithRM,      iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,  iemOp_InvalidWithRM,      iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,    iemOp_InvalidWithRM,      iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,   iemOp_InvalidWithRM,      iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM,      iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,  iemOp_Grp15_clflushopt,   iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);


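/*
 * Note: Group 15 dispatches on both the mod R/M reg field and the mandatory
 * prefix, so the tables above are indexed by reg * 4 + idxPrefix (prefix
 * order: none, 066h, 0f3h, 0f2h). For example, /5 with no prefix in register
 * mode selects iemOp_Grp15_lfence.
 */
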
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate, nor strictly needed, but useful when debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                              + pVCpu->iem.s.idxPrefix], bRm);
}


/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
}


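/*
 * Note: CMPXCHG compares the accumulator (AL/AX/EAX/RAX) with the destination
 * operand. Conceptually (sketch only):
 *      if (rAX == dst) { ZF = 1; dst = src; }
 *      else            { ZF = 0; rAX = dst; }
 * Architecturally the destination always gets a write cycle, which is why a
 * LOCK prefix simply selects the locked worker variants below.
 */
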
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


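/*
 * Note: LSS/LFS/LGS load a far pointer from memory: the offset part (16, 32
 * or 64 bits depending on operand size) comes first, with the 16-bit selector
 * immediately after it, hence the IEM_MC_FETCH_MEM_U16_DISP calls with
 * displacements of 2, 4 and 8 in the common worker below.
 */
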
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


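/*
 * Note: MOVZX zero extends, so storing the 32-bit result already clears the
 * upper half of a 64-bit destination; as a sketch, the byte-to-quadword case
 * is simply dst = (uint64_t)(uint8_t)src.
 */
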
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);


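/*
 * Note: POPCNT counts the set bits in the source operand. When the host CPU
 * itself has POPCNT the native assembly worker is used, otherwise the C
 * fallback; IEM_SELECT_HOST_OR_FALLBACK makes that pick below.
 */
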
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
}


/**
 * @opcode 0xb9
 * @opinvalid intel-modrm
 * @optest ->
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
     * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to
     * iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}


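/*
 * Note: For the BT/BTS/BTR/BTC forms with an immediate bit offset, the offset
 * is taken modulo the operand size, hence the u8Bit & 0x0f/0x1f/0x3f masking
 * below. Only the Gv forms with a register bit offset can index outside the
 * addressed operand (including negative offsets, cf. the todo below).
 */
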
/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt Ev,Ib");  break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}


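/*
 * Note: When the source operand is zero, BSF/BSR set ZF and leave the
 * destination unchanged on AMD (Intel documents the destination as undefined
 * in that case, but in practice also leaves it alone). That is why the
 * 32-bit paths in the worker below only clear the upper half of the
 * destination when ZF is clear, i.e. when a bit was found and written.
 */
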
/**
 * Common worker for BSF and BSR instructions.
 *
 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
 * the destination register, which means that for 32-bit operations the high
 * bits must be left alone.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}


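/*
 * Note: Unlike BSF, TZCNT is well defined for a zero source: it returns the
 * operand width and sets CF (ZF instead reflects a zero count). On CPUs
 * without BMI1 the 0xf3 prefix is ignored and the encoding executes as plain
 * BSF, which is what the feature check below reproduces.
 */
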
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}


/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}


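/*
 * Note: LZCNT mirrors TZCNT: a zero source yields the operand width with CF
 * set, and CPUs without the feature execute the encoding as plain BSR, hence
 * the fallback to iemOp_bsr_Gv_Ev below.
 */
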
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}



/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
10683 /*
10684 * If rm is denoting a register, no more instruction bytes.
10685 */
10686 if (IEM_IS_MODRM_REG_MODE(bRm))
10687 {
10688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10689 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10690 {
10691 IEM_MC_BEGIN(0, 1);
10692 IEM_MC_LOCAL(uint32_t, u32Value);
10693 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10694 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10695 IEM_MC_ADVANCE_RIP();
10696 IEM_MC_END();
10697 }
10698 else
10699 {
10700 IEM_MC_BEGIN(0, 1);
10701 IEM_MC_LOCAL(uint64_t, u64Value);
10702 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10703 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10704 IEM_MC_ADVANCE_RIP();
10705 IEM_MC_END();
10706 }
10707 }
10708 else
10709 {
10710 /*
10711 * We're loading a register from memory.
10712 */
10713 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10714 {
10715 IEM_MC_BEGIN(0, 2);
10716 IEM_MC_LOCAL(uint32_t, u32Value);
10717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10720 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10721 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10722 IEM_MC_ADVANCE_RIP();
10723 IEM_MC_END();
10724 }
10725 else
10726 {
10727 IEM_MC_BEGIN(0, 2);
10728 IEM_MC_LOCAL(uint64_t, u64Value);
10729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10732 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10733 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10734 IEM_MC_ADVANCE_RIP();
10735 IEM_MC_END();
10736 }
10737 }
10738 return VINF_SUCCESS;
10739}
10740
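/* Note! Illustrative sketch only, not used by the emulator: the sign
   extension the movsx forms above perform, spelled out in plain C. The
   function name is made up for the example. */
#if 0
static uint64_t movsxExampleU16ToU64(uint16_t u16Src)
{
    /* movsx rax, cx: reinterpret the low 16 bits as signed, then widen. */
    return (uint64_t)(int64_t)(int16_t)u16Src; /* 0x8000 -> 0xffffffffffff8000 */
}
#endif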
10741
10742/** Opcode 0x0f 0xc0. */
10743FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
10744{
10745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10746 IEMOP_HLP_MIN_486();
10747 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
10748
10749 /*
10750 * If rm is denoting a register, no more instruction bytes.
10751 */
10752 if (IEM_IS_MODRM_REG_MODE(bRm))
10753 {
10754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10755
10756 IEM_MC_BEGIN(3, 0);
10757 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10758 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10759 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10760
10761 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10762 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10763 IEM_MC_REF_EFLAGS(pEFlags);
10764 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
10765
10766 IEM_MC_ADVANCE_RIP();
10767 IEM_MC_END();
10768 }
10769 else
10770 {
10771 /*
10772 * We're accessing memory.
10773 */
10774 IEM_MC_BEGIN(3, 3);
10775 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10776 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10777 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10778 IEM_MC_LOCAL(uint8_t, u8RegCopy);
10779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10780
10781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10782 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10783 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10784 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
10785 IEM_MC_FETCH_EFLAGS(EFlags);
10786 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10787 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
10788 else
10789 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
10790
10791 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10792 IEM_MC_COMMIT_EFLAGS(EFlags);
10793 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
10794 IEM_MC_ADVANCE_RIP();
10795 IEM_MC_END();
10796 return VINF_SUCCESS;
10797 }
10798 return VINF_SUCCESS;
10799}
10800
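/* Note! Illustrative sketch only, not used by the emulator: what xadd
   computes. The register operand receives the old destination value and the
   destination receives the sum; the names below are hypothetical. */
#if 0
static void xaddExampleU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8OldDst = *pu8Dst;
    *pu8Dst = u8OldDst + *pu8Reg;   /* the destination gets the sum, */
    *pu8Reg = u8OldDst;             /* the source register the old value. */
    /* EFLAGS are set as for an ADD of the original operands. */
}
#endif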
10801
10802/** Opcode 0x0f 0xc1. */
10803FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
10804{
10805 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
10806 IEMOP_HLP_MIN_486();
10807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10808
10809 /*
10810 * If rm is denoting a register, no more instruction bytes.
10811 */
10812 if (IEM_IS_MODRM_REG_MODE(bRm))
10813 {
10814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10815
10816 switch (pVCpu->iem.s.enmEffOpSize)
10817 {
10818 case IEMMODE_16BIT:
10819 IEM_MC_BEGIN(3, 0);
10820 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10821 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10822 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10823
10824 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10825 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10826 IEM_MC_REF_EFLAGS(pEFlags);
10827 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
10828
10829 IEM_MC_ADVANCE_RIP();
10830 IEM_MC_END();
10831 return VINF_SUCCESS;
10832
10833 case IEMMODE_32BIT:
10834 IEM_MC_BEGIN(3, 0);
10835 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10836 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10837 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10838
10839 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10840 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10841 IEM_MC_REF_EFLAGS(pEFlags);
10842 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
10843
10844 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10845 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10846 IEM_MC_ADVANCE_RIP();
10847 IEM_MC_END();
10848 return VINF_SUCCESS;
10849
10850 case IEMMODE_64BIT:
10851 IEM_MC_BEGIN(3, 0);
10852 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10853 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10854 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10855
10856 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10857 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
10858 IEM_MC_REF_EFLAGS(pEFlags);
10859 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
10860
10861 IEM_MC_ADVANCE_RIP();
10862 IEM_MC_END();
10863 return VINF_SUCCESS;
10864
10865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10866 }
10867 }
10868 else
10869 {
10870 /*
10871 * We're accessing memory.
10872 */
10873 switch (pVCpu->iem.s.enmEffOpSize)
10874 {
10875 case IEMMODE_16BIT:
10876 IEM_MC_BEGIN(3, 3);
10877 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10878 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10879 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10880 IEM_MC_LOCAL(uint16_t, u16RegCopy);
10881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10882
10883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10884 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10885 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10886 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
10887 IEM_MC_FETCH_EFLAGS(EFlags);
10888 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10889 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
10890 else
10891 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
10892
10893 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10894 IEM_MC_COMMIT_EFLAGS(EFlags);
10895 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
10896 IEM_MC_ADVANCE_RIP();
10897 IEM_MC_END();
10898 return VINF_SUCCESS;
10899
10900 case IEMMODE_32BIT:
10901 IEM_MC_BEGIN(3, 3);
10902 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10903 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10904 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10905 IEM_MC_LOCAL(uint32_t, u32RegCopy);
10906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10907
10908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10909 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10910 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10911 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
10912 IEM_MC_FETCH_EFLAGS(EFlags);
10913 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10914 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
10915 else
10916 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
10917
10918 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10919 IEM_MC_COMMIT_EFLAGS(EFlags);
10920 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
10921 IEM_MC_ADVANCE_RIP();
10922 IEM_MC_END();
10923 return VINF_SUCCESS;
10924
10925 case IEMMODE_64BIT:
10926 IEM_MC_BEGIN(3, 3);
10927 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10928 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10929 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10930 IEM_MC_LOCAL(uint64_t, u64RegCopy);
10931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10932
10933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10934 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10935 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
10936 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
10937 IEM_MC_FETCH_EFLAGS(EFlags);
10938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10939 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
10940 else
10941 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
10942
10943 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10944 IEM_MC_COMMIT_EFLAGS(EFlags);
10945 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
10946 IEM_MC_ADVANCE_RIP();
10947 IEM_MC_END();
10948 return VINF_SUCCESS;
10949
10950 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10951 }
10952 }
10953}
10954
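/* Note! Illustrative sketch only, not used by the emulator: with a LOCK
   prefix the memory forms above behave like an atomic fetch-and-add, which
   is why the locked worker variants exist. C11 equivalent: */
#if 0
#include <stdatomic.h>
static uint32_t lockXaddExampleU32(_Atomic uint32_t *pu32Mem, uint32_t u32Addend)
{
    /* Returns the previous memory value, just like the register after xadd. */
    return atomic_fetch_add(pu32Mem, u32Addend);
}
#endif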
10955
10956/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
10957FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
10958{
10959 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10960
10961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10962 if (IEM_IS_MODRM_REG_MODE(bRm))
10963 {
10964 /*
10965 * Register, register.
10966 */
10967 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10969 IEM_MC_BEGIN(4, 2);
10970 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
10971 IEM_MC_LOCAL(X86XMMREG, Dst);
10972 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
10973 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
10974 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
10975 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
10976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
10977 IEM_MC_PREPARE_SSE_USAGE();
10978 IEM_MC_REF_MXCSR(pfMxcsr);
10979 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
10980 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
10981 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
10982 IEM_MC_IF_MXCSR_XCPT_PENDING()
10983 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
10984 IEM_MC_ELSE()
10985 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
10986 IEM_MC_ENDIF();
10987
10988 IEM_MC_ADVANCE_RIP();
10989 IEM_MC_END();
10990 }
10991 else
10992 {
10993 /*
10994 * Register, memory.
10995 */
10996 IEM_MC_BEGIN(4, 3);
10997 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
10998 IEM_MC_LOCAL(X86XMMREG, Dst);
10999 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11000 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11001 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11003
11004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11005 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11006 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11008 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11009 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11010
11011 IEM_MC_PREPARE_SSE_USAGE();
11012 IEM_MC_REF_MXCSR(pfMxcsr);
11013 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11014 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11015 IEM_MC_IF_MXCSR_XCPT_PENDING()
11016 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11017 IEM_MC_ELSE()
11018 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11019 IEM_MC_ENDIF();
11020
11021 IEM_MC_ADVANCE_RIP();
11022 IEM_MC_END();
11023 }
11024 return VINF_SUCCESS;
11025}
11026
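/* Note! Illustrative sketch only, not used by the emulator: the low three
   bits of the cmpps/cmppd/cmpss/cmpsd immediate select the predicate, and
   each element compare yields an all-ones or all-zeroes mask. Single-lane
   sketch, ignoring the exception/QNaN details the MXCSR handling deals with. */
#if 0
static uint32_t cmppsExampleLane(float r32Src1, float r32Src2, uint8_t bImm)
{
    bool fRes;
    switch (bImm & 7)
    {
        case 0:  fRes = r32Src1 == r32Src2; break;                       /* EQ */
        case 1:  fRes = r32Src1 <  r32Src2; break;                       /* LT */
        case 2:  fRes = r32Src1 <= r32Src2; break;                       /* LE */
        case 3:  fRes = r32Src1 != r32Src1 || r32Src2 != r32Src2; break; /* UNORD */
        case 4:  fRes = r32Src1 != r32Src2; break;                       /* NEQ (incl. unordered) */
        case 5:  fRes = !(r32Src1 <  r32Src2); break;                    /* NLT */
        case 6:  fRes = !(r32Src1 <= r32Src2); break;                    /* NLE */
        default: fRes = r32Src1 == r32Src1 && r32Src2 == r32Src2; break; /* ORD */
    }
    return fRes ? UINT32_MAX : 0;
}
#endif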
11027
11028/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11029FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11030{
11031 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11032
11033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11034 if (IEM_IS_MODRM_REG_MODE(bRm))
11035 {
11036 /*
11037 * Register, register.
11038 */
11039 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11041 IEM_MC_BEGIN(4, 2);
11042 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11043 IEM_MC_LOCAL(X86XMMREG, Dst);
11044 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11045 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11046 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11047 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11048 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11049 IEM_MC_PREPARE_SSE_USAGE();
11050 IEM_MC_REF_MXCSR(pfMxcsr);
11051 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11052 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11053 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11054 IEM_MC_IF_MXCSR_XCPT_PENDING()
11055 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11056 IEM_MC_ELSE()
11057 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11058 IEM_MC_ENDIF();
11059
11060 IEM_MC_ADVANCE_RIP();
11061 IEM_MC_END();
11062 }
11063 else
11064 {
11065 /*
11066 * Register, memory.
11067 */
11068 IEM_MC_BEGIN(4, 3);
11069 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11070 IEM_MC_LOCAL(X86XMMREG, Dst);
11071 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11072 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11073 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11075
11076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11077 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11078 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11080 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11081 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11082
11083 IEM_MC_PREPARE_SSE_USAGE();
11084 IEM_MC_REF_MXCSR(pfMxcsr);
11085 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11086 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11087 IEM_MC_IF_MXCSR_XCPT_PENDING()
11088 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11089 IEM_MC_ELSE()
11090 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11091 IEM_MC_ENDIF();
11092
11093 IEM_MC_ADVANCE_RIP();
11094 IEM_MC_END();
11095 }
11096 return VINF_SUCCESS;
11097}
11098
11099
11100/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11101FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11102{
11103 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11104
11105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11106 if (IEM_IS_MODRM_REG_MODE(bRm))
11107 {
11108 /*
11109 * Register, register.
11110 */
11111 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11113 IEM_MC_BEGIN(4, 2);
11114 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11115 IEM_MC_LOCAL(X86XMMREG, Dst);
11116 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11117 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11118 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11119 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11120 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11121 IEM_MC_PREPARE_SSE_USAGE();
11122 IEM_MC_REF_MXCSR(pfMxcsr);
11123 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11124 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11125 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11126 IEM_MC_IF_MXCSR_XCPT_PENDING()
11127 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11128 IEM_MC_ELSE()
11129 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11130 IEM_MC_ENDIF();
11131
11132 IEM_MC_ADVANCE_RIP();
11133 IEM_MC_END();
11134 }
11135 else
11136 {
11137 /*
11138 * Register, memory.
11139 */
11140 IEM_MC_BEGIN(4, 3);
11141 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11142 IEM_MC_LOCAL(X86XMMREG, Dst);
11143 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11144 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11145 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11147
11148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11149 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11150 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11152 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11153 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11154
11155 IEM_MC_PREPARE_SSE_USAGE();
11156 IEM_MC_REF_MXCSR(pfMxcsr);
11157 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11158 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11159 IEM_MC_IF_MXCSR_XCPT_PENDING()
11160 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11161 IEM_MC_ELSE()
11162 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11163 IEM_MC_ENDIF();
11164
11165 IEM_MC_ADVANCE_RIP();
11166 IEM_MC_END();
11167 }
11168 return VINF_SUCCESS;
11169}
11170
11171
11172/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11173FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11174{
11175 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11176
11177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11178 if (IEM_IS_MODRM_REG_MODE(bRm))
11179 {
11180 /*
11181 * Register, register.
11182 */
11183 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11185 IEM_MC_BEGIN(4, 2);
11186 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11187 IEM_MC_LOCAL(X86XMMREG, Dst);
11188 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11189 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11190 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11191 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11192 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11193 IEM_MC_PREPARE_SSE_USAGE();
11194 IEM_MC_REF_MXCSR(pfMxcsr);
11195 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11196 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11197 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11198 IEM_MC_IF_MXCSR_XCPT_PENDING()
11199 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11200 IEM_MC_ELSE()
11201 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11202 IEM_MC_ENDIF();
11203
11204 IEM_MC_ADVANCE_RIP();
11205 IEM_MC_END();
11206 }
11207 else
11208 {
11209 /*
11210 * Register, memory.
11211 */
11212 IEM_MC_BEGIN(4, 3);
11213 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11214 IEM_MC_LOCAL(X86XMMREG, Dst);
11215 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11216 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11217 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11219
11220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11221 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11222 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11224 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11225 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11226
11227 IEM_MC_PREPARE_SSE_USAGE();
11228 IEM_MC_REF_MXCSR(pfMxcsr);
11229 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11230 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11231 IEM_MC_IF_MXCSR_XCPT_PENDING()
11232 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11233 IEM_MC_ELSE()
11234 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11235 IEM_MC_ENDIF();
11236
11237 IEM_MC_ADVANCE_RIP();
11238 IEM_MC_END();
11239 }
11240 return VINF_SUCCESS;
11241}
11242
11243
11244/** Opcode 0x0f 0xc3. */
11245FNIEMOP_DEF(iemOp_movnti_My_Gy)
11246{
11247 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11248
11249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11250
11251 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11252 if (IEM_IS_MODRM_MEM_MODE(bRm))
11253 {
11254 switch (pVCpu->iem.s.enmEffOpSize)
11255 {
11256 case IEMMODE_32BIT:
11257 IEM_MC_BEGIN(0, 2);
11258 IEM_MC_LOCAL(uint32_t, u32Value);
11259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11260
11261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11263 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11264 return IEMOP_RAISE_INVALID_OPCODE();
11265
11266 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11267 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11268 IEM_MC_ADVANCE_RIP();
11269 IEM_MC_END();
11270 break;
11271
11272 case IEMMODE_64BIT:
11273 IEM_MC_BEGIN(0, 2);
11274 IEM_MC_LOCAL(uint64_t, u64Value);
11275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11276
11277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11279 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11280 return IEMOP_RAISE_INVALID_OPCODE();
11281
11282 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11283 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11284 IEM_MC_ADVANCE_RIP();
11285 IEM_MC_END();
11286 break;
11287
11288 case IEMMODE_16BIT:
11289 /** @todo check this form. */
11290 return IEMOP_RAISE_INVALID_OPCODE();
11291 }
11292 }
11293 else
11294 return IEMOP_RAISE_INVALID_OPCODE();
11295 return VINF_SUCCESS;
11296}
11297
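/* Note! Illustrative sketch only, not used by the emulator: movnti is an
   ordinary store as far as architectural state goes; the non-temporal hint
   only affects caching, so the emulation above can use a plain store. The
   SSE2 intrinsic equivalent: */
#if 0
#include <emmintrin.h>
static void movntiExample(int *pi32Dst, int i32Value)
{
    _mm_stream_si32(pi32Dst, i32Value); /* movnti [pi32Dst], i32Value */
}
#endif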
11298
11299/* Opcode 0x66 0x0f 0xc3 - invalid */
11300/* Opcode 0xf3 0x0f 0xc3 - invalid */
11301/* Opcode 0xf2 0x0f 0xc3 - invalid */
11302
11303
11304/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11305FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11306{
11307 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11309 if (IEM_IS_MODRM_REG_MODE(bRm))
11310 {
11311 /*
11312 * Register, register.
11313 */
11314 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11316 IEM_MC_BEGIN(3, 0);
11317 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11318 IEM_MC_ARG(uint16_t, u16Src, 1);
11319 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11320 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11321 IEM_MC_PREPARE_FPU_USAGE();
11322 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11323 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11324 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11325 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11326 IEM_MC_FPU_TO_MMX_MODE();
11327 IEM_MC_ADVANCE_RIP();
11328 IEM_MC_END();
11329 }
11330 else
11331 {
11332 /*
11333 * Register, memory.
11334 */
11335 IEM_MC_BEGIN(3, 2);
11336 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11337 IEM_MC_ARG(uint16_t, u16Src, 1);
11338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11339
11340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11341 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11342 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11344 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11345 IEM_MC_PREPARE_FPU_USAGE();
11346
11347 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11348 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11349 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11350 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11351 IEM_MC_FPU_TO_MMX_MODE();
11352 IEM_MC_ADVANCE_RIP();
11353 IEM_MC_END();
11354 }
11355 return VINF_SUCCESS;
11356}
11357
11358
11359/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11360FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11361{
11362 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11364 if (IEM_IS_MODRM_REG_MODE(bRm))
11365 {
11366 /*
11367 * Register, register.
11368 */
11369 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11371 IEM_MC_BEGIN(3, 0);
11372 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11373 IEM_MC_ARG(uint16_t, u16Src, 1);
11374 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11375 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11376 IEM_MC_PREPARE_SSE_USAGE();
11377 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11378 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11379 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11380 IEM_MC_ADVANCE_RIP();
11381 IEM_MC_END();
11382 }
11383 else
11384 {
11385 /*
11386 * Register, memory.
11387 */
11388 IEM_MC_BEGIN(3, 2);
11389 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11390 IEM_MC_ARG(uint16_t, u16Src, 1);
11391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11392
11393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11394 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11395 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11397 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11398 IEM_MC_PREPARE_SSE_USAGE();
11399
11400 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11401 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11402 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11403 IEM_MC_ADVANCE_RIP();
11404 IEM_MC_END();
11405 }
11406 return VINF_SUCCESS;
11407}
11408
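/* Note! Illustrative sketch only, not used by the emulator: pinsrw replaces
   a single 16-bit lane selected by the immediate and leaves the other lanes
   untouched. */
#if 0
static void pinsrwExampleU128(uint16_t au16Dst[8], uint16_t u16Src, uint8_t bImm)
{
    au16Dst[bImm & 7] = u16Src; /* the MMX form masks with 3 instead of 7 */
}
#endif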
11409
11410/* Opcode 0xf3 0x0f 0xc4 - invalid */
11411/* Opcode 0xf2 0x0f 0xc4 - invalid */
11412
11413
11414/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11415FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11416{
11417 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);*/ /** @todo */
11418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11419 if (IEM_IS_MODRM_REG_MODE(bRm))
11420 {
11421 /*
11422 * Register, register.
11423 */
11424 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11426 IEM_MC_BEGIN(3, 1);
11427 IEM_MC_LOCAL(uint16_t, u16Dst);
11428 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11429 IEM_MC_ARG(uint64_t, u64Src, 1);
11430 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11431 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11432 IEM_MC_PREPARE_FPU_USAGE();
11433 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11434 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bEvilArg);
11435 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11436 IEM_MC_FPU_TO_MMX_MODE();
11437 IEM_MC_ADVANCE_RIP();
11438 IEM_MC_END();
11439 return VINF_SUCCESS;
11440 }
11441
11442 /* No memory operand. */
11443 return IEMOP_RAISE_INVALID_OPCODE();
11444}
11445
11446
11447/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11448FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11449{
11450 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11452 if (IEM_IS_MODRM_REG_MODE(bRm))
11453 {
11454 /*
11455 * Register, register.
11456 */
11457 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11459 IEM_MC_BEGIN(3, 1);
11460 IEM_MC_LOCAL(uint16_t, u16Dst);
11461 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11462 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11463 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11464 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11465 IEM_MC_PREPARE_SSE_USAGE();
11466 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11467 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bEvilArg);
11468 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11469 IEM_MC_ADVANCE_RIP();
11470 IEM_MC_END();
11471 return VINF_SUCCESS;
11472 }
11473
11474 /* No memory operand. */
11475 return IEMOP_RAISE_INVALID_OPCODE();
11476}
11477
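/* Note! Illustrative sketch only, not used by the emulator: pextrw is the
   inverse selection - the chosen 16-bit lane is zero extended into the
   general purpose destination register. */
#if 0
static uint32_t pextrwExampleU128(uint16_t const au16Src[8], uint8_t bImm)
{
    return au16Src[bImm & 7]; /* implicitly zero extended to 32/64 bits */
}
#endif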
11478
11479/* Opcode 0xf3 0x0f 0xc5 - invalid */
11480/* Opcode 0xf2 0x0f 0xc5 - invalid */
11481
11482
11483/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
11484FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11485{
11486 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11488 if (IEM_IS_MODRM_REG_MODE(bRm))
11489 {
11490 /*
11491 * Register, register.
11492 */
11493 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11495 IEM_MC_BEGIN(3, 0);
11496 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11497 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11498 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11499 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11500 IEM_MC_PREPARE_SSE_USAGE();
11501 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11502 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11503 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11504 IEM_MC_ADVANCE_RIP();
11505 IEM_MC_END();
11506 }
11507 else
11508 {
11509 /*
11510 * Register, memory.
11511 */
11512 IEM_MC_BEGIN(3, 2);
11513 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11514 IEM_MC_LOCAL(RTUINT128U, uSrc);
11515 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11517
11518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11519 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11520 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11522 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11523 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11524
11525 IEM_MC_PREPARE_SSE_USAGE();
11526 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11527 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11528
11529 IEM_MC_ADVANCE_RIP();
11530 IEM_MC_END();
11531 }
11532 return VINF_SUCCESS;
11533}
11534
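/* Note! Illustrative sketch only, not used by the emulator: shufps takes the
   two low result lanes from the destination and the two high ones from the
   source, each picked by a 2-bit field of the immediate. */
#if 0
static void shufpsExample(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
{
    uint32_t const au32OldDst[4] = { au32Dst[0], au32Dst[1], au32Dst[2], au32Dst[3] };
    au32Dst[0] = au32OldDst[ bImm       & 3];
    au32Dst[1] = au32OldDst[(bImm >> 2) & 3];
    au32Dst[2] = au32Src[   (bImm >> 4) & 3];
    au32Dst[3] = au32Src[   (bImm >> 6) & 3];
}
#endif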
11535
11536/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
11537FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
11538{
11539 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11541 if (IEM_IS_MODRM_REG_MODE(bRm))
11542 {
11543 /*
11544 * Register, register.
11545 */
11546 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11548 IEM_MC_BEGIN(3, 0);
11549 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11550 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11551 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11552 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11553 IEM_MC_PREPARE_SSE_USAGE();
11554 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11555 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11556 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
11557 IEM_MC_ADVANCE_RIP();
11558 IEM_MC_END();
11559 }
11560 else
11561 {
11562 /*
11563 * Register, memory.
11564 */
11565 IEM_MC_BEGIN(3, 2);
11566 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11567 IEM_MC_LOCAL(RTUINT128U, uSrc);
11568 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11570
11571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11572 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11573 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11575 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11576 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11577
11578 IEM_MC_PREPARE_SSE_USAGE();
11579 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11580 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
11581
11582 IEM_MC_ADVANCE_RIP();
11583 IEM_MC_END();
11584 }
11585 return VINF_SUCCESS;
11586}
11587
11588
11589/* Opcode 0xf3 0x0f 0xc6 - invalid */
11590/* Opcode 0xf2 0x0f 0xc6 - invalid */
11591
11592
11593/** Opcode 0x0f 0xc7 !11/1. */
11594FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
11595{
11596 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
11597
11598 IEM_MC_BEGIN(4, 3);
11599 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
11600 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
11601 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
11602 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
11603 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
11604 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
11605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11606
11607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11608 IEMOP_HLP_DONE_DECODING();
11609 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11610
11611 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
11612 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
11613 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
11614
11615 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
11616 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
11617 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
11618
11619 IEM_MC_FETCH_EFLAGS(EFlags);
11620 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11621 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
11622 else
11623 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
11624
11625 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
11626 IEM_MC_COMMIT_EFLAGS(EFlags);
11627 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
11628 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
11629 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
11630 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
11631 IEM_MC_ENDIF();
11632 IEM_MC_ADVANCE_RIP();
11633
11634 IEM_MC_END();
11635 return VINF_SUCCESS;
11636}
11637
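/* Note! Illustrative sketch only, not used by the emulator: the compare and
   exchange the cmpxchg8b worker implements, with the register pairs folded
   into 64-bit values as above. */
#if 0
static bool cmpxchg8bExample(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx;       /* match: store ECX:EBX, ZF=1 */
        return true;
    }
    *pu64EaxEdx = *pu64Mem;         /* no match: load memory into EDX:EAX, ZF=0 */
    return false;
}
#endif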
11638
11639/** Opcode REX.W 0x0f 0xc7 !11/1. */
11640FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
11641{
11642 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
11643 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
11644 {
11645#if 0
11646 RT_NOREF(bRm);
11647 IEMOP_BITCH_ABOUT_STUB();
11648 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
11649#else
11650 IEM_MC_BEGIN(4, 3);
11651 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
11652 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
11653 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
11654 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
11655 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
11656 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
11657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11658
11659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11660 IEMOP_HLP_DONE_DECODING();
11661 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
11662 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11663
11664 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
11665 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
11666 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
11667
11668 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
11669 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
11670 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
11671
11672 IEM_MC_FETCH_EFLAGS(EFlags);
11673# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
11674# if defined(RT_ARCH_AMD64)
11675 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
11676# endif
11677 {
11678 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11679 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11680 else
11681 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11682 }
11683# if defined(RT_ARCH_AMD64)
11684 else
11685# endif
11686# endif
11687# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
11688 {
11689 /* Note! The fallback for 32-bit systems and systems without CX16 does
11690 multiple accesses that are not atomic as a whole, which works fine in a
11691 uni-CPU guest configuration (ignoring DMA). If guest SMP is active we
11692 have no choice but to use a rendezvous callback here. Sigh. */
11693 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
11694 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11695 else
11696 {
11697 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
11698 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
11699 }
11700 }
11701# endif
11702
11703 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
11704 IEM_MC_COMMIT_EFLAGS(EFlags);
11705 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
11706 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
11707 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
11708 IEM_MC_ENDIF();
11709 IEM_MC_ADVANCE_RIP();
11710
11711 IEM_MC_END();
11712 return VINF_SUCCESS;
11713#endif
11714 }
11715 Log(("cmpxchg16b -> #UD\n"));
11716 return IEMOP_RAISE_INVALID_OPCODE();
11717}
11718
11719FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
11720{
11721 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
11722 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
11723 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
11724}
11725
11726/** Opcode 0x0f 0xc7 11/6. */
11727FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
11728
11729/** Opcode 0x0f 0xc7 !11/6. */
11730#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11731FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
11732{
11733 IEMOP_MNEMONIC(vmptrld, "vmptrld");
11734 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
11735 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
11736 IEM_MC_BEGIN(2, 0);
11737 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11738 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
11739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11740 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
11741 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11742 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
11743 IEM_MC_END();
11744 return VINF_SUCCESS;
11745}
11746#else
11747FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
11748#endif
11749
11750/** Opcode 0x66 0x0f 0xc7 !11/6. */
11751#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11752FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
11753{
11754 IEMOP_MNEMONIC(vmclear, "vmclear");
11755 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
11756 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
11757 IEM_MC_BEGIN(2, 0);
11758 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11759 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
11760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11761 IEMOP_HLP_DONE_DECODING();
11762 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11763 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
11764 IEM_MC_END();
11765 return VINF_SUCCESS;
11766}
11767#else
11768FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
11769#endif
11770
11771/** Opcode 0xf3 0x0f 0xc7 !11/6. */
11772#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11773FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
11774{
11775 IEMOP_MNEMONIC(vmxon, "vmxon");
11776 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
11777 IEM_MC_BEGIN(2, 0);
11778 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11779 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
11780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11781 IEMOP_HLP_DONE_DECODING();
11782 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11783 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
11784 IEM_MC_END();
11785 return VINF_SUCCESS;
11786}
11787#else
11788FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
11789#endif
11790
11791/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
11792#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
11793FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
11794{
11795 IEMOP_MNEMONIC(vmptrst, "vmptrst");
11796 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
11797 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
11798 IEM_MC_BEGIN(2, 0);
11799 IEM_MC_ARG(uint8_t, iEffSeg, 0);
11800 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
11801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11802 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
11803 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
11804 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
11805 IEM_MC_END();
11806 return VINF_SUCCESS;
11807}
11808#else
11809FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
11810#endif
11811
11812/** Opcode 0x0f 0xc7 11/7. */
11813FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
11814
11815
11816/**
11817 * Group 9 jump table for register variant.
11818 */
11819IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
11820{ /* pfx: none, 066h, 0f3h, 0f2h */
11821 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
11822 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
11823 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
11824 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
11825 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
11826 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
11827 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11828 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11829};
11830AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
11831
11832
11833/**
11834 * Group 9 jump table for memory variant.
11835 */
11836IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
11837{ /* pfx: none, 066h, 0f3h, 0f2h */
11838 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
11839 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
11840 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
11841 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
11842 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
11843 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
11844 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
11845 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
11846};
11847AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
11848
11849
11850/** Opcode 0x0f 0xc7. */
11851FNIEMOP_DEF(iemOp_Grp9)
11852{
11853 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
11854 if (IEM_IS_MODRM_REG_MODE(bRm))
11855 /* register, register */
11856 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
11857 + pVCpu->iem.s.idxPrefix], bRm);
11858 /* memory, register */
11859 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
11860 + pVCpu->iem.s.idxPrefix], bRm);
11861}
11862
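/* Note! Illustrative sketch only, not used by the emulator: the group 9
   tables above are laid out as eight ModR/M.reg rows of four prefix columns
   (none, 066h, 0f3h, 0f2h), so the dispatch index is simply reg*4 + prefix. */
#if 0
static unsigned grp9ExampleIndex(uint8_t bReg /* 0..7 */, uint8_t idxPrefix /* 0..3 */)
{
    return bReg * 4u + idxPrefix; /* row-major lookup into g_apfnGroup9RegReg/MemReg */
}
#endif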
11863
11864/**
11865 * Common 'bswap register' helper.
11866 */
11867FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
11868{
11869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11870 switch (pVCpu->iem.s.enmEffOpSize)
11871 {
11872 case IEMMODE_16BIT:
11873 IEM_MC_BEGIN(1, 0);
11874 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11875 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
11876 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
11877 IEM_MC_ADVANCE_RIP();
11878 IEM_MC_END();
11879 return VINF_SUCCESS;
11880
11881 case IEMMODE_32BIT:
11882 IEM_MC_BEGIN(1, 0);
11883 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11884 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
11885 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11886 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
11887 IEM_MC_ADVANCE_RIP();
11888 IEM_MC_END();
11889 return VINF_SUCCESS;
11890
11891 case IEMMODE_64BIT:
11892 IEM_MC_BEGIN(1, 0);
11893 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11894 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
11895 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
11896 IEM_MC_ADVANCE_RIP();
11897 IEM_MC_END();
11898 return VINF_SUCCESS;
11899
11900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11901 }
11902}
11903
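/* Note! Illustrative sketch only, not used by the emulator: the byte
   reversal bswap performs on a 32-bit register. The 16-bit form is
   officially undefined, which is why the u16 worker above operates on a
   32-bit register reference. */
#if 0
static uint32_t bswapExampleU32(uint32_t u32)
{
    return ((u32 & UINT32_C(0x000000ff)) << 24)
         | ((u32 & UINT32_C(0x0000ff00)) <<  8)
         | ((u32 & UINT32_C(0x00ff0000)) >>  8)
         | ((u32 & UINT32_C(0xff000000)) >> 24);
}
#endif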
11904
11905/** Opcode 0x0f 0xc8. */
11906FNIEMOP_DEF(iemOp_bswap_rAX_r8)
11907{
11908 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
11909 /* Note! Intel's manuals state that R8-R15 can be accessed by using a REX.X
11910 prefix, but REX.B appears to be the correct prefix. For a parallel
11911 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
11912 IEMOP_HLP_MIN_486();
11913 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
11914}
11915
11916
11917/** Opcode 0x0f 0xc9. */
11918FNIEMOP_DEF(iemOp_bswap_rCX_r9)
11919{
11920 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
11921 IEMOP_HLP_MIN_486();
11922 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
11923}
11924
11925
11926/** Opcode 0x0f 0xca. */
11927FNIEMOP_DEF(iemOp_bswap_rDX_r10)
11928{
11929 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
11930 IEMOP_HLP_MIN_486();
11931 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
11932}
11933
11934
11935/** Opcode 0x0f 0xcb. */
11936FNIEMOP_DEF(iemOp_bswap_rBX_r11)
11937{
11938 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
11939 IEMOP_HLP_MIN_486();
11940 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
11941}
11942
11943
11944/** Opcode 0x0f 0xcc. */
11945FNIEMOP_DEF(iemOp_bswap_rSP_r12)
11946{
11947 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
11948 IEMOP_HLP_MIN_486();
11949 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
11950}
11951
11952
11953/** Opcode 0x0f 0xcd. */
11954FNIEMOP_DEF(iemOp_bswap_rBP_r13)
11955{
11956 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
11957 IEMOP_HLP_MIN_486();
11958 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
11959}
11960
11961
11962/** Opcode 0x0f 0xce. */
11963FNIEMOP_DEF(iemOp_bswap_rSI_r14)
11964{
11965 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
11966 IEMOP_HLP_MIN_486();
11967 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
11968}
11969
11970
11971/** Opcode 0x0f 0xcf. */
11972FNIEMOP_DEF(iemOp_bswap_rDI_r15)
11973{
11974 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
11975 IEMOP_HLP_MIN_486();
11976 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
11977}
11978
11979
11980/* Opcode 0x0f 0xd0 - invalid */
11981
11982
11983/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
11984FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
11985{
11986 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
11987 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
11988}
11989
11990
11991/* Opcode 0xf3 0x0f 0xd0 - invalid */
11992
11993
11994/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
11995FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
11996{
11997 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
11998 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
11999}
12000
12001
12002
12003/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12004FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12005{
12006 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12007 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12008}
12009
12010/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12011FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12012{
12013 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12014 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12015}
12016
12017/* Opcode 0xf3 0x0f 0xd1 - invalid */
12018/* Opcode 0xf2 0x0f 0xd1 - invalid */
12019
12020/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12021FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12022{
12023 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12024 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12025}
12026
12027
12028/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12029FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12030{
12031 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12032 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12033}
12034
12035
12036/* Opcode 0xf3 0x0f 0xd2 - invalid */
12037/* Opcode 0xf2 0x0f 0xd2 - invalid */
12038
12039/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12040FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12041{
12042 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12043 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12044}
12045
12046
12047/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12048FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12049{
12050 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12051 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12052}
12053
12054
12055/* Opcode 0xf3 0x0f 0xd3 - invalid */
12056/* Opcode 0xf2 0x0f 0xd3 - invalid */
12057
12058
12059/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12060FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12061{
12062 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12063 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12064}
12065
12066
12067/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12068FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12069{
12070 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12071 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12072}
12073
12074
12075/* Opcode 0xf3 0x0f 0xd4 - invalid */
12076/* Opcode 0xf2 0x0f 0xd4 - invalid */
12077
12078/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12079FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12080{
12081 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12082 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12083}
12084
12085/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12086FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12087{
12088 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12089 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12090}
12091
12092
12093/* Opcode 0xf3 0x0f 0xd5 - invalid */
12094/* Opcode 0xf2 0x0f 0xd5 - invalid */
12095
12096/* Opcode 0x0f 0xd6 - invalid */
12097
12098/**
12099 * @opcode 0xd6
12100 * @oppfx 0x66
12101 * @opcpuid sse2
12102 * @opgroup og_sse2_pcksclr_datamove
12103 * @opxcpttype none
12104 * @optest op1=-1 op2=2 -> op1=2
12105 * @optest op1=0 op2=-42 -> op1=-42
12106 */
12107FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12108{
12109 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12111 if (IEM_IS_MODRM_REG_MODE(bRm))
12112 {
12113 /*
12114 * Register, register.
12115 */
12116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12117 IEM_MC_BEGIN(0, 2);
12118 IEM_MC_LOCAL(uint64_t, uSrc);
12119
12120 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12121 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12122
12123 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12124 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12125
12126 IEM_MC_ADVANCE_RIP();
12127 IEM_MC_END();
12128 }
12129 else
12130 {
12131 /*
12132 * Memory, register.
12133 */
12134 IEM_MC_BEGIN(0, 2);
12135 IEM_MC_LOCAL(uint64_t, uSrc);
12136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12137
12138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12140 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12141 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12142
12143 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12144 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12145
12146 IEM_MC_ADVANCE_RIP();
12147 IEM_MC_END();
12148 }
12149 return VINF_SUCCESS;
12150}
12151
12152
12153/**
12154 * @opcode 0xd6
12155 * @opcodesub 11 mr/reg
12156 * @oppfx f3
12157 * @opcpuid sse2
12158 * @opgroup og_sse2_simdint_datamove
12159 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12160 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12161 */
12162FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12163{
12164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12165 if (IEM_IS_MODRM_REG_MODE(bRm))
12166 {
12167 /*
12168 * Register, register.
12169 */
12170 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12172 IEM_MC_BEGIN(0, 1);
12173 IEM_MC_LOCAL(uint64_t, uSrc);
12174
12175 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12176 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12177
12178 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12179 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12180 IEM_MC_FPU_TO_MMX_MODE();
12181
12182 IEM_MC_ADVANCE_RIP();
12183 IEM_MC_END();
12184 return VINF_SUCCESS;
12185 }
12186
12187 /**
12188 * @opdone
12189 * @opmnemonic udf30fd6mem
12190 * @opcode 0xd6
12191 * @opcodesub !11 mr/reg
12192 * @oppfx f3
12193 * @opunused intel-modrm
12194 * @opcpuid sse
12195 * @optest ->
12196 */
12197 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12198}
12199
12200
12201/**
12202 * @opcode 0xd6
12203 * @opcodesub 11 mr/reg
12204 * @oppfx f2
12205 * @opcpuid sse2
12206 * @opgroup og_sse2_simdint_datamove
12207 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12208 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12209 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12210 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12211 * @optest op1=-42 op2=0xfedcba9876543210
12212 * -> op1=0xfedcba9876543210 ftw=0xff
12213 */
12214FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12215{
12216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12217 if (IEM_IS_MODRM_REG_MODE(bRm))
12218 {
12219 /*
12220 * Register, register.
12221 */
12222 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12224 IEM_MC_BEGIN(0, 1);
12225 IEM_MC_LOCAL(uint64_t, uSrc);
12226
12227 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12228 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12229
12230 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12231 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12232 IEM_MC_FPU_TO_MMX_MODE();
12233
12234 IEM_MC_ADVANCE_RIP();
12235 IEM_MC_END();
12236 return VINF_SUCCESS;
12237 }
12238
12239 /**
12240 * @opdone
12241 * @opmnemonic udf20fd6mem
12242 * @opcode 0xd6
12243 * @opcodesub !11 mr/reg
12244 * @oppfx f2
12245 * @opunused intel-modrm
12246 * @opcpuid sse
12247 * @optest ->
12248 */
12249 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12250}
12251
12252
12253/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12254FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12255{
12256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12257 /* Docs say register only. */
12258 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12259 {
12260 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12261 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
12262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12263 IEM_MC_BEGIN(2, 0);
12264 IEM_MC_ARG(uint64_t *, puDst, 0);
12265 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12266 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12267 IEM_MC_PREPARE_FPU_USAGE();
12268 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
12269 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12270 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12271 IEM_MC_FPU_TO_MMX_MODE();
12272 IEM_MC_ADVANCE_RIP();
12273 IEM_MC_END();
12274 return VINF_SUCCESS;
12275 }
12276 return IEMOP_RAISE_INVALID_OPCODE();
12277}
12278
12279
12280/** Opcode 0x66 0x0f 0xd7 - */
12281FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12282{
12283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12284 /* Docs say register only. */
12285 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12286 {
12287 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12288 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
12289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12290 IEM_MC_BEGIN(2, 0);
12291 IEM_MC_ARG(uint64_t *, puDst, 0);
12292 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12293 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12294 IEM_MC_PREPARE_SSE_USAGE();
12295 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12296 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12297 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12298 IEM_MC_ADVANCE_RIP();
12299 IEM_MC_END();
12300 return VINF_SUCCESS;
12301 }
12302 return IEMOP_RAISE_INVALID_OPCODE();
12303}
12304
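/* Note! Illustrative sketch only, not used by the emulator: pmovmskb gathers
   the most significant bit of every source byte into the low bits of the
   destination GPR and zeroes the rest. */
#if 0
static uint32_t pmovmskbExampleU128(uint8_t const abSrc[16])
{
    uint32_t fMask = 0;
    for (unsigned i = 0; i < 16; i++)
        fMask |= (uint32_t)(abSrc[i] >> 7) << i;
    return fMask;
}
#endif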
12305
12306/* Opcode 0xf3 0x0f 0xd7 - invalid */
12307/* Opcode 0xf2 0x0f 0xd7 - invalid */
12308
12309
12310/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12311FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12312{
12313 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12314 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12315}
12316
12317
12318/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12319FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12320{
12321 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12322 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12323}
12324
12325
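/* Note! Illustrative sketch only, not used by the emulator: the
   psubusX/paddusX family saturates instead of wrapping; unsigned subtraction
   clamps at zero, unsigned addition at the type maximum. Per-byte sketch: */
#if 0
static uint8_t psubusbExampleLane(uint8_t bSrc1, uint8_t bSrc2)
{
    return bSrc1 > bSrc2 ? (uint8_t)(bSrc1 - bSrc2) : 0; /* clamp at 0 */
}
#endif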
12326/* Opcode 0xf3 0x0f 0xd8 - invalid */
12327/* Opcode 0xf2 0x0f 0xd8 - invalid */
12328
12329/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12330FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12331{
12332 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12333 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12334}
12335
12336
12337/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12338FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12339{
12340 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12341 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12342}
12343
12344
12345/* Opcode 0xf3 0x0f 0xd9 - invalid */
12346/* Opcode 0xf2 0x0f 0xd9 - invalid */
12347
12348/** Opcode 0x0f 0xda - pminub Pq, Qq */
12349FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12350{
12351 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12352 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12353}
12354
12355
12356/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12357FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12358{
12359 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12360 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12361}
12362
12363/* Opcode 0xf3 0x0f 0xda - invalid */
12364/* Opcode 0xf2 0x0f 0xda - invalid */
12365
12366/** Opcode 0x0f 0xdb - pand Pq, Qq */
12367FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12368{
12369 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12370 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12371}
12372
12373
12374/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12375FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12376{
12377 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12378 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12379}
12380
12381
12382/* Opcode 0xf3 0x0f 0xdb - invalid */
12383/* Opcode 0xf2 0x0f 0xdb - invalid */
12384
12385/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12386FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12387{
12388 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12389 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12390}
12391
12392
12393/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12394FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12395{
12396 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12397 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12398}
12399
12400
12401/* Opcode 0xf3 0x0f 0xdc - invalid */
12402/* Opcode 0xf2 0x0f 0xdc - invalid */
12403
12404/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12405FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12406{
12407 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12408 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12409}
12410
12411
12412/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
12413FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
12414{
12415 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12416 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
12417}
12418
12419
12420/* Opcode 0xf3 0x0f 0xdd - invalid */
12421/* Opcode 0xf2 0x0f 0xdd - invalid */
12422
12423/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
12424FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
12425{
12426 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12427 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
12428}
12429
12430
12431/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
12432FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
12433{
12434 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12435 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
12436}
12437
12438/* Opcode 0xf3 0x0f 0xde - invalid */
12439/* Opcode 0xf2 0x0f 0xde - invalid */
12440
12441
12442/** Opcode 0x0f 0xdf - pandn Pq, Qq */
12443FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
12444{
12445 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12446 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
12447}
12448
12449
12450/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
12451FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
12452{
12453 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12454 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
12455}
12456
12457
12458/* Opcode 0xf3 0x0f 0xdf - invalid */
12459/* Opcode 0xf2 0x0f 0xdf - invalid */
12460
12461/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
12462FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
12463{
12464 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12465 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
12466}
12467
12468
12469/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
12470FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
12471{
12472 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12473 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
12474}
12475
12476
12477/* Opcode 0xf3 0x0f 0xe0 - invalid */
12478/* Opcode 0xf2 0x0f 0xe0 - invalid */
12479
12480/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
12481FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
12482{
12483 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12484 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
12485}
12486
12487
12488/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
12489FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
12490{
12491 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12492 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
12493}
12494
12495
12496/* Opcode 0xf3 0x0f 0xe1 - invalid */
12497/* Opcode 0xf2 0x0f 0xe1 - invalid */
12498
12499/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
12500FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
12501{
12502 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12503 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
12504}
12505
12506
12507/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
12508FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
12509{
12510 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12511 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
12512}
12513
12514
12515/* Opcode 0xf3 0x0f 0xe2 - invalid */
12516/* Opcode 0xf2 0x0f 0xe2 - invalid */
12517
12518/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
12519FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
12520{
12521 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12522 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
12523}
12524
12525
12526/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
12527FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
12528{
12529 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12530 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
12531}
12532
12533
12534/* Opcode 0xf3 0x0f 0xe3 - invalid */
12535/* Opcode 0xf2 0x0f 0xe3 - invalid */
12536
12537/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
12538FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
12539{
12540 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12541 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
12542}
12543
12544
12545/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
12546FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
12547{
12548 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12549 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
12550}
12551
12552
12553/* Opcode 0xf3 0x0f 0xe4 - invalid */
12554/* Opcode 0xf2 0x0f 0xe4 - invalid */
12555
12556/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
12557FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
12558{
12559 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12560 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
12561}
12562
12563
12564/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
12565FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
12566{
12567 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12568 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
12569}
12570
12571
12572/* Opcode 0xf3 0x0f 0xe5 - invalid */
12573/* Opcode 0xf2 0x0f 0xe5 - invalid */
12574
12575/* Opcode 0x0f 0xe6 - invalid */
12576/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
12577FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
12578/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
12579FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
12580/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
12581FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
12582
12583
12584/**
12585 * @opcode 0xe7
12586 * @opcodesub !11 mr/reg
12587 * @oppfx none
12588 * @opcpuid sse
12589 * @opgroup og_sse1_cachect
12590 * @opxcpttype none
12591 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
12592 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12593 */
12594FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
12595{
12596 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12598 if (IEM_IS_MODRM_MEM_MODE(bRm))
12599 {
12600 /* Register, memory. */
12601 IEM_MC_BEGIN(0, 2);
12602 IEM_MC_LOCAL(uint64_t, uSrc);
12603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12604
12605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12607 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12608 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12609
12610 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
12611 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12612 IEM_MC_FPU_TO_MMX_MODE();
12613
12614 IEM_MC_ADVANCE_RIP();
12615 IEM_MC_END();
12616 return VINF_SUCCESS;
12617 }
12618 /**
12619 * @opdone
12620 * @opmnemonic ud0fe7reg
12621 * @opcode 0xe7
12622 * @opcodesub 11 mr/reg
12623 * @oppfx none
12624 * @opunused immediate
12625 * @opcpuid sse
12626 * @optest ->
12627 */
12628 return IEMOP_RAISE_INVALID_OPCODE();
12629}
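
/* Note! The non-temporal hint carried by movntq has no effect on the stored
   value, so the emulation above legitimately performs a plain 64-bit store. */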

/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2  -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
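
/* Note! Unlike movntq above, movntdq stores via the alignment-checking
   IEM_MC_STORE_MEM_U128_ALIGN_SSE, so a misaligned 16-byte operand faults,
   matching the @opxcpttype 1 annotation. */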

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
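
/* Note! lddqu is gated on SSE3 (IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT) and
   deliberately uses the non-aligning IEM_MC_FETCH_MEM_U128, matching the
   instruction's unaligned-load semantics. */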


/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}


/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}


/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}
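
/* Note! The MMX form of psubq was introduced with SSE2, hence the _Ex worker
   variant above that additionally checks the guest's fSse2 feature flag
   before dispatching. */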


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
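
/* Note! On Intel CPUs ud0 consumes a ModR/M byte (and decodes any memory
   operand) before raising \#UD, which is what the vendor check above models;
   other vendors take the plain invalid-opcode path. */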



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*         no prefix,            066h prefix,           f3h prefix,           f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
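
/*
 * Layout note: each of the 256 opcodes has four consecutive entries, one per
 * mandatory-prefix column (none, 0x66, 0xf3, 0xf2), which is what the
 * 256 * 4 == 1024 compile-time check above asserts.  A hypothetical lookup
 * for opcode byte bOpcode under prefix column idxPrefix (0..3) would be:
 */
#if 0 /* documentation-only sketch; bOpcode and idxPrefix are placeholder names */
PFNIEMOP const pfnOp = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
#endif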

/** @} */
