VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@99317

Last change on this file since 99317 was 99307, checked in by vboxsync, 21 months ago:

VMM/IEM: More work on processing MC blocks, mainly related to reducing the number of parameters for MC blocks in threaded function mode. bugref:10369

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 99307 2023-04-06 02:08:28Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


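/** @par MC block shape (illustrative sketch)
 * The workers below all share one microcode (MC) block shape:
 * IEM_MC_BEGIN opens a block with the nominal counts of worker arguments and
 * locals, IEM_MC_ARG / IEM_MC_LOCAL / IEM_MC_ARG_LOCAL_REF declare them, and
 * IEM_MC_END() closes the block, e.g.:
 * @code
 *     IEM_MC_BEGIN(2, 0);
 *     IEM_MC_ARG(uint64_t *, pDst, 0);
 *     IEM_MC_ARG(uint64_t const *, pSrc, 1);
 *     // ... raise checks, operand binding and the helper call go here ...
 *     IEM_MC_ADVANCE_RIP_AND_FINISH();
 *     IEM_MC_END();
 * @endcode
 */
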
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
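
/** @par Usage sketch (illustrative only; handler and helper names assumed)
 * A typical caller of the worker above is a one-line opcode handler that just
 * supplies the matching 64-bit media helper:
 * @code
 * FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 * {
 *     IEMOP_MNEMONIC(pxxx, "pxxx Pq,Qq");
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxxx_u64);
 * }
 * @endcode
 */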


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
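
/** @par Usage sketch (illustrative only; handler and helper names assumed)
 * The _Ex variant additionally takes the CPUID-derived support flag, so a
 * caller would pass the relevant guest feature bit along with the helper:
 * @code
 * FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 * {
 *     IEMOP_MNEMONIC(pxxx, "pxxx Pq,Qq");
 *     return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_pxxx_u64,
 *                           IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
 * }
 * @endcode
 */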


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
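
/** @par Usage sketch (illustrative only; handler and helper names assumed)
 * Callers of the SSE2 worker above look the same as the MMX ones, only with a
 * 128-bit helper:
 * @code
 * FNIEMOP_DEF(iemOp_pxxx_Vx_Wx)
 * {
 *     IEMOP_MNEMONIC(pxxx, "pxxx Vx,Wx");
 *     return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxxx_u128);
 * }
 * @endcode
 */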


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel docs this to be a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
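
/** @par Usage sketch (illustrative only; handler and helper names assumed)
 * A packed single-precision instruction would route through the worker above
 * with a PFNIEMAIMPLFPSSEF2U128 compatible helper:
 * @code
 * FNIEMOP_DEF(iemOp_xxxps_Vps_Wps)
 * {
 *     IEMOP_MNEMONIC(xxxps, "xxxps Vps,Wps");
 *     return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_xxxps_u128);
 * }
 * @endcode
 */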


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
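
/** @par Usage sketch (illustrative only; handler and helper names assumed)
 * A scalar double-precision instruction would route through the worker above
 * with a PFNIEMAIMPLFPSSEF2U128R64 compatible helper:
 * @code
 * FNIEMOP_DEF(iemOp_xxxsd_Vsd_Wsd)
 * {
 *     IEMOP_MNEMONIC(xxxsd, "xxxsd Vsd,Wsd");
 *     return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_xxxsd_u128_r64);
 * }
 * @endcode
 */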


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
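
/* Worked decode example: for bRm = 0xC8 (mod=3, reg=1, rm=0),
   IEM_GET_MODRM_REG_8 yields 1, so the table above dispatches to
   iemOp_Grp6_str, which then takes its register operand path since
   IEM_IS_MODRM_REG_MODE is true for mod=3. */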


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


1758/**
1759 * Group 7 jump table, memory variant.
1760 */
1761IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1762{
1763 iemOp_Grp7_sgdt,
1764 iemOp_Grp7_sidt,
1765 iemOp_Grp7_lgdt,
1766 iemOp_Grp7_lidt,
1767 iemOp_Grp7_smsw,
1768 iemOp_InvalidWithRM,
1769 iemOp_Grp7_lmsw,
1770 iemOp_Grp7_invlpg
1771};
1772
1773
1774/** Opcode 0x0f 0x01. */
1775FNIEMOP_DEF(iemOp_Grp7)
1776{
1777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1778 if (IEM_IS_MODRM_MEM_MODE(bRm))
1779 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1780
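    /* Register forms: dispatch on the reg field first; for several of these
       encodings the rm field then selects the actual instruction (VMX, SVM,
       xgetbv/xsetbv, swapgs/rdtscp). */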
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Opcode 0x0f 0x00 /3. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

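            /* Note: the 32-bit and 64-bit operand sizes below share the
               64-bit worker, iemCImpl_LarLsl_u64. */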
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
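    /* Note: the register form below only replaces the low dword of the
       destination, while the memory form zero-extends the loaded dword to the
       full 128 bits. */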
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM32.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], XMM64.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

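/* Opcode 0x0f 0x12: the register form decodes as MOVHLPS, the memory form as MOVLPS. */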
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
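        /* Duplicate the even dwords: result = { src0, src0, src2, src2 }. */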
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM64.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint64_t, uSrc, 0);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
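        /* Broadcast the low qword to both halves of the destination. */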
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(uint64_t, uSrc, 0);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic udf30f13
 * @opcode 0x13
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f13
 * @opcode 0x13
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}


/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}


/**
 * @opdone
 * @opmnemonic udf30f14
 * @opcode 0x14
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f14
 * @opcode 0x14
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}


/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}


/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic udf30f15
 * @opcode 0x15
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f15
 * @opcode 0x15
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
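    /* The register form decodes as MOVLHPS, the memory form as MOVHPS; both
       write the high qword of the destination register. */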
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
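        /* Duplicate the odd dwords: result = { src1, src1, src3, src3 }. */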
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * @opdone
 * @opmnemonic udf20f16
 * @opcode 0x16
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opdone
 * @opmnemonic udf30f17
 * @opcode 0x17
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f17
 * @opcode 0x17
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
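    /* Only CR0, CR2, CR3, CR4 and (via the LOCK prefix) CR8 are decodable
       here; everything else raises #UD. */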
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}


/**
 * @opcode 0x28
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * @opcode 0x28
 * @oppfx 66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/**
 * @opcode 0x29
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * @opcode 0x29
 * @oppfx 66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
{
    IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
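        /* The MMX register source switches the FPU into MMX mode. */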
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3659
3660 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3661 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3662 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3663 } IEM_MC_ELSE() {
3664 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3665 } IEM_MC_ENDIF();
3666
3667 IEM_MC_ADVANCE_RIP_AND_FINISH();
3668 IEM_MC_END();
3669 }
3670}
3671
3672
3673/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3674FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3675{
3676 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3678 if (IEM_IS_MODRM_REG_MODE(bRm))
3679 {
3680 /*
3681 * XMM, MMX
3682 */
3683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3684
3685 IEM_MC_BEGIN(3, 1);
3686 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3687 IEM_MC_LOCAL(X86XMMREG, Dst);
3688 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3689 IEM_MC_ARG(uint64_t, u64Src, 2);
3690 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3691 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3692 IEM_MC_PREPARE_FPU_USAGE();
3693 IEM_MC_FPU_TO_MMX_MODE();
3694
3695 IEM_MC_REF_MXCSR(pfMxcsr);
3696 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3697
3698 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3699 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3700 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3701 } IEM_MC_ELSE() {
3702 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3703 } IEM_MC_ENDIF();
3704
3705 IEM_MC_ADVANCE_RIP_AND_FINISH();
3706 IEM_MC_END();
3707 }
3708 else
3709 {
3710 /*
3711 * XMM, [mem64]
3712 */
3713 IEM_MC_BEGIN(3, 3);
3714 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3715 IEM_MC_LOCAL(X86XMMREG, Dst);
3716 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3717 IEM_MC_ARG(uint64_t, u64Src, 2);
3718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3719
3720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3722 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3723 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3724 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3725
3726 /* Doesn't cause a transition to MMX mode. */
3727 IEM_MC_PREPARE_SSE_USAGE();
3728 IEM_MC_REF_MXCSR(pfMxcsr);
3729
3730 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3731 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3732 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3733 } IEM_MC_ELSE() {
3734 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3735 } IEM_MC_ENDIF();
3736
3737 IEM_MC_ADVANCE_RIP_AND_FINISH();
3738 IEM_MC_END();
3739 }
3740}
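
/*
 * Unlike cvtpi2ps above, cvtpi2pd widens both int32 source elements to
 * doubles and so overwrites the whole destination register; that is why no
 * prior fetch of the XMM register is needed here. Sketch (reading aid only):
 *
 *      Dst.ar64[0] = (double)Src.ai32[0];
 *      Dst.ar64[1] = (double)Src.ai32[1];
 */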
3741
3742
3743/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3744FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3745{
3746 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3747
3748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3749 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3750 {
3751 if (IEM_IS_MODRM_REG_MODE(bRm))
3752 {
3753 /* XMM, greg64 */
3754 IEM_MC_BEGIN(3, 2);
3755 IEM_MC_LOCAL(uint32_t, fMxcsr);
3756 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3757 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3758 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3759 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3760
3761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3762 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3763 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3764
3765 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3766 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3767 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3768 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3769 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3770 } IEM_MC_ELSE() {
3771 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3772 } IEM_MC_ENDIF();
3773
3774 IEM_MC_ADVANCE_RIP_AND_FINISH();
3775 IEM_MC_END();
3776 }
3777 else
3778 {
3779 /* XMM, [mem64] */
3780 IEM_MC_BEGIN(3, 4);
3781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3782 IEM_MC_LOCAL(uint32_t, fMxcsr);
3783 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3784 IEM_MC_LOCAL(int64_t, i64Src);
3785 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3786 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3787 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3788
3789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3791 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3792 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3793
3794 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3795 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3796 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3797 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3798 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3799 } IEM_MC_ELSE() {
3800 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3801 } IEM_MC_ENDIF();
3802
3803 IEM_MC_ADVANCE_RIP_AND_FINISH();
3804 IEM_MC_END();
3805 }
3806 }
3807 else
3808 {
3809 if (IEM_IS_MODRM_REG_MODE(bRm))
3810 {
3811 /* XMM, greg32 */
3812 IEM_MC_BEGIN(3, 2);
3813 IEM_MC_LOCAL(uint32_t, fMxcsr);
3814 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3815 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3816 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3817 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3818
3819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3820 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3821 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3822
3823 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3824 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3825 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3826 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3827 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3828 } IEM_MC_ELSE() {
3829 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3830 } IEM_MC_ENDIF();
3831
3832 IEM_MC_ADVANCE_RIP_AND_FINISH();
3833 IEM_MC_END();
3834 }
3835 else
3836 {
3837 /* XMM, [mem32] */
3838 IEM_MC_BEGIN(3, 4);
3839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3840 IEM_MC_LOCAL(uint32_t, fMxcsr);
3841 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3842 IEM_MC_LOCAL(int32_t, i32Src);
3843 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3844 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3845 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3846
3847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3849 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3850 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3851
3852 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3853 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3854 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3855 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3856 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3857 } IEM_MC_ELSE() {
3858 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3859 } IEM_MC_ENDIF();
3860
3861 IEM_MC_ADVANCE_RIP_AND_FINISH();
3862 IEM_MC_END();
3863 }
3864 }
3865}
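
/*
 * The REX.W check above picks the integer source width for cvtsi2ss; in SDM
 * notation (reading aid only):
 *
 *      F3 0F 2A /r         CVTSI2SS xmm1, r/m32    ; i32 -> low f32
 *      F3 REX.W 0F 2A /r   CVTSI2SS xmm1, r/m64    ; i64 -> low f32
 *
 * cvtsi2sd below is dispatched the same way.
 */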
3866
3867
3868/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3869FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3870{
3871 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3872
3873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3874 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3875 {
3876 if (IEM_IS_MODRM_REG_MODE(bRm))
3877 {
3878 /* XMM, greg64 */
3879 IEM_MC_BEGIN(3, 2);
3880 IEM_MC_LOCAL(uint32_t, fMxcsr);
3881 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3882 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3883 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3884 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3885
3886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3887 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3888 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3889
3890 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3891 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3892 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3893 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3894 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3895 } IEM_MC_ELSE() {
3896 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3897 } IEM_MC_ENDIF();
3898
3899 IEM_MC_ADVANCE_RIP_AND_FINISH();
3900 IEM_MC_END();
3901 }
3902 else
3903 {
3904 /* XMM, [mem64] */
3905 IEM_MC_BEGIN(3, 4);
3906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3907 IEM_MC_LOCAL(uint32_t, fMxcsr);
3908 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3909 IEM_MC_LOCAL(int64_t, i64Src);
3910 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3911 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3912 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3913
3914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3916 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3917 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3918
3919 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3920 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3921 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3922 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3923 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3924 } IEM_MC_ELSE() {
3925 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3926 } IEM_MC_ENDIF();
3927
3928 IEM_MC_ADVANCE_RIP_AND_FINISH();
3929 IEM_MC_END();
3930 }
3931 }
3932 else
3933 {
3934 if (IEM_IS_MODRM_REG_MODE(bRm))
3935 {
3936 /* XMM, greg32 */
3937 IEM_MC_BEGIN(3, 2);
3938 IEM_MC_LOCAL(uint32_t, fMxcsr);
3939 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3940 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3941 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3942 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3943
3944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3946 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3947
3948 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3949 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3950 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3951 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3952 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3953 } IEM_MC_ELSE() {
3954 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3955 } IEM_MC_ENDIF();
3956
3957 IEM_MC_ADVANCE_RIP_AND_FINISH();
3958 IEM_MC_END();
3959 }
3960 else
3961 {
3962 /* XMM, [mem32] */
3963 IEM_MC_BEGIN(3, 4);
3964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3965 IEM_MC_LOCAL(uint32_t, fMxcsr);
3966 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3967 IEM_MC_LOCAL(int32_t, i32Src);
3968 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3969 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3970 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3971
3972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3974 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3975 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3976
3977 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3978 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3979 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3980 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3981 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3982 } IEM_MC_ELSE() {
3983 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3984 } IEM_MC_ENDIF();
3985
3986 IEM_MC_ADVANCE_RIP_AND_FINISH();
3987 IEM_MC_END();
3988 }
3989 }
3990}
3991
3992
3993/**
3994 * @opcode 0x2b
3995 * @opcodesub !11 mr/reg
3996 * @oppfx none
3997 * @opcpuid sse
3998 * @opgroup og_sse1_cachect
3999 * @opxcpttype 1
4000 * @optest op1=1 op2=2 -> op1=2
4001 * @optest op1=0 op2=-42 -> op1=-42
4002 */
4003FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4004{
4005 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4007 if (IEM_IS_MODRM_MEM_MODE(bRm))
4008 {
4009 /*
4010 * memory, register.
4011 */
4012 IEM_MC_BEGIN(0, 2);
4013 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4015
4016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4020
4021 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4022 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4023
4024 IEM_MC_ADVANCE_RIP_AND_FINISH();
4025 IEM_MC_END();
4026 }
4027 /* The register, register encoding is invalid. */
4028 else
4029 return IEMOP_RAISE_INVALID_OPCODE();
4030}
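
/*
 * movntps (and movntpd below) is a streaming store: a hint that the data is
 * non-temporal and may bypass the caches. The memory operand must be 16 byte
 * aligned, otherwise #GP is raised - that is what the ALIGN_SSE variant of
 * the store above checks - and the register form is invalid (#UD).
 */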
4031
4032/**
4033 * @opcode 0x2b
4034 * @opcodesub !11 mr/reg
4035 * @oppfx 0x66
4036 * @opcpuid sse2
4037 * @opgroup og_sse2_cachect
4038 * @opxcpttype 1
4039 * @optest op1=1 op2=2 -> op1=2
4040 * @optest op1=0 op2=-42 -> op1=-42
4041 */
4042FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4043{
4044 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4046 if (IEM_IS_MODRM_MEM_MODE(bRm))
4047 {
4048 /*
4049 * memory, register.
4050 */
4051 IEM_MC_BEGIN(0, 2);
4052 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4054
4055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4057 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4058 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4059
4060 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4061 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4062
4063 IEM_MC_ADVANCE_RIP_AND_FINISH();
4064 IEM_MC_END();
4065 }
4066 /* The register, register encoding is invalid. */
4067 else
4068 return IEMOP_RAISE_INVALID_OPCODE();
4069}
4070/* Opcode 0xf3 0x0f 0x2b - invalid */
4071/* Opcode 0xf2 0x0f 0x2b - invalid */
4072
4073
4074/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4075FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4076{
4077 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4079 if (IEM_IS_MODRM_REG_MODE(bRm))
4080 {
4081 /*
4082 * Register, register.
4083 */
4084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4085
4086 IEM_MC_BEGIN(3, 1);
4087 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4088 IEM_MC_LOCAL(uint64_t, u64Dst);
4089 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4090 IEM_MC_ARG(uint64_t, u64Src, 2);
4091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4092 IEM_MC_PREPARE_FPU_USAGE();
4093 IEM_MC_FPU_TO_MMX_MODE();
4094
4095 IEM_MC_REF_MXCSR(pfMxcsr);
4096 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4097
4098 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4099 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4100 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4101 } IEM_MC_ELSE() {
4102 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4103 } IEM_MC_ENDIF();
4104
4105 IEM_MC_ADVANCE_RIP_AND_FINISH();
4106 IEM_MC_END();
4107 }
4108 else
4109 {
4110 /*
4111 * Register, memory.
4112 */
4113 IEM_MC_BEGIN(3, 2);
4114 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4115 IEM_MC_LOCAL(uint64_t, u64Dst);
4116 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4117 IEM_MC_ARG(uint64_t, u64Src, 2);
4118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4119
4120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4122 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4123 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4124
4125 IEM_MC_PREPARE_FPU_USAGE();
4126 IEM_MC_FPU_TO_MMX_MODE();
4127 IEM_MC_REF_MXCSR(pfMxcsr);
4128
4129 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4130 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4131 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4132 } IEM_MC_ELSE() {
4133 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4134 } IEM_MC_ENDIF();
4135
4136 IEM_MC_ADVANCE_RIP_AND_FINISH();
4137 IEM_MC_END();
4138 }
4139}
4140
4141
4142/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4143FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4144{
4145 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4147 if (IEM_IS_MODRM_REG_MODE(bRm))
4148 {
4149 /*
4150 * Register, register.
4151 */
4152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4153
4154 IEM_MC_BEGIN(3, 1);
4155 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4156 IEM_MC_LOCAL(uint64_t, u64Dst);
4157 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4158 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4159 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4160 IEM_MC_PREPARE_FPU_USAGE();
4161 IEM_MC_FPU_TO_MMX_MODE();
4162
4163 IEM_MC_REF_MXCSR(pfMxcsr);
4164 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4165
4166 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4167 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4168 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4169 } IEM_MC_ELSE() {
4170 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4171 } IEM_MC_ENDIF();
4172
4173 IEM_MC_ADVANCE_RIP_AND_FINISH();
4174 IEM_MC_END();
4175 }
4176 else
4177 {
4178 /*
4179 * Register, memory.
4180 */
4181 IEM_MC_BEGIN(3, 3);
4182 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4183 IEM_MC_LOCAL(uint64_t, u64Dst);
4184 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4185 IEM_MC_LOCAL(X86XMMREG, uSrc);
4186 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4188
4189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4191 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4192 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4193
4194 IEM_MC_PREPARE_FPU_USAGE();
4195 IEM_MC_FPU_TO_MMX_MODE();
4196
4197 IEM_MC_REF_MXCSR(pfMxcsr);
4198
4199 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4200 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4201 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4202 } IEM_MC_ELSE() {
4203 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4204 } IEM_MC_ENDIF();
4205
4206 IEM_MC_ADVANCE_RIP_AND_FINISH();
4207 IEM_MC_END();
4208 }
4209}
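
/*
 * The cvt(t)ps2pi / cvt(t)pd2pi forms write an MMX register, so like any
 * other MMX instruction they switch the x87 unit into MMX mode (FPU TOS
 * cleared, tag word set to all valid); that is what IEM_MC_FPU_TO_MMX_MODE()
 * models above. The memory forms do it too, since the destination is still
 * an MMX register - contrast with the cvtpi2pd memory form further up.
 */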
4210
4211
4212/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4213FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4214{
4215 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4216
4217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4218 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4219 {
4220 if (IEM_IS_MODRM_REG_MODE(bRm))
4221 {
4222 /* greg64, XMM */
4223 IEM_MC_BEGIN(3, 2);
4224 IEM_MC_LOCAL(uint32_t, fMxcsr);
4225 IEM_MC_LOCAL(int64_t, i64Dst);
4226 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4227 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4228 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4229
4230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4231 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4232 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4233
4234 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4235 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4236 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4237 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4238 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4239 } IEM_MC_ELSE() {
4240 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4241 } IEM_MC_ENDIF();
4242
4243 IEM_MC_ADVANCE_RIP_AND_FINISH();
4244 IEM_MC_END();
4245 }
4246 else
4247 {
4248 /* greg64, [mem32] */
4249 IEM_MC_BEGIN(3, 4);
4250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4251 IEM_MC_LOCAL(uint32_t, fMxcsr);
4252 IEM_MC_LOCAL(int64_t, i64Dst);
4253 IEM_MC_LOCAL(uint32_t, u32Src);
4254 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4255 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4256 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4257
4258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4260 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4261 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4262
4263 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4264 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4265 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4266 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4267 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4268 } IEM_MC_ELSE() {
4269 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4270 } IEM_MC_ENDIF();
4271
4272 IEM_MC_ADVANCE_RIP_AND_FINISH();
4273 IEM_MC_END();
4274 }
4275 }
4276 else
4277 {
4278 if (IEM_IS_MODRM_REG_MODE(bRm))
4279 {
4280 /* greg32, XMM */
4281 IEM_MC_BEGIN(3, 2);
4282 IEM_MC_LOCAL(uint32_t, fMxcsr);
4283 IEM_MC_LOCAL(int32_t, i32Dst);
4284 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4285 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4286 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4287
4288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4289 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4290 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4291
4292 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4293 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4294 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4295 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4296 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4297 } IEM_MC_ELSE() {
4298 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4299 } IEM_MC_ENDIF();
4300
4301 IEM_MC_ADVANCE_RIP_AND_FINISH();
4302 IEM_MC_END();
4303 }
4304 else
4305 {
4306 /* greg32, [mem32] */
4307 IEM_MC_BEGIN(3, 4);
4308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4309 IEM_MC_LOCAL(uint32_t, fMxcsr);
4310 IEM_MC_LOCAL(int32_t, i32Dst);
4311 IEM_MC_LOCAL(uint32_t, u32Src);
4312 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4313 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4314 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4315
4316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4318 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4319 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4320
4321 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4322 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4323 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4324 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4325 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4326 } IEM_MC_ELSE() {
4327 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4328 } IEM_MC_ENDIF();
4329
4330 IEM_MC_ADVANCE_RIP_AND_FINISH();
4331 IEM_MC_END();
4332 }
4333 }
4334}
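
/*
 * The 0x2c opcodes (cvtt*) always truncate toward zero, whereas the 0x2d
 * opcodes below use the current MXCSR rounding mode. Example with the
 * default round-to-nearest-even mode:
 *
 *      cvttss2si(1.7f) == 1    (truncated)
 *      cvtss2si(1.7f)  == 2    (rounded per MXCSR.RC)
 */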
4335
4336
4337/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4338FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4339{
4340 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4341
4342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4343 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4344 {
4345 if (IEM_IS_MODRM_REG_MODE(bRm))
4346 {
4347 /* greg64, XMM */
4348 IEM_MC_BEGIN(3, 2);
4349 IEM_MC_LOCAL(uint32_t, fMxcsr);
4350 IEM_MC_LOCAL(int64_t, i64Dst);
4351 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4352 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4353 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4354
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4356 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4357 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4358
4359 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4360 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4361 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4362 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4363 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4364 } IEM_MC_ELSE() {
4365 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4366 } IEM_MC_ENDIF();
4367
4368 IEM_MC_ADVANCE_RIP_AND_FINISH();
4369 IEM_MC_END();
4370 }
4371 else
4372 {
4373 /* greg64, [mem64] */
4374 IEM_MC_BEGIN(3, 4);
4375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4376 IEM_MC_LOCAL(uint32_t, fMxcsr);
4377 IEM_MC_LOCAL(int64_t, i64Dst);
4378 IEM_MC_LOCAL(uint64_t, u64Src);
4379 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4380 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4381 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4382
4383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4385 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4386 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4387
4388 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4389 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4390 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4391 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4392 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4393 } IEM_MC_ELSE() {
4394 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4395 } IEM_MC_ENDIF();
4396
4397 IEM_MC_ADVANCE_RIP_AND_FINISH();
4398 IEM_MC_END();
4399 }
4400 }
4401 else
4402 {
4403 if (IEM_IS_MODRM_REG_MODE(bRm))
4404 {
4405 /* greg32, XMM */
4406 IEM_MC_BEGIN(3, 2);
4407 IEM_MC_LOCAL(uint32_t, fMxcsr);
4408 IEM_MC_LOCAL(int32_t, i32Dst);
4409 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4410 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4411 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4412
4413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4414 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4415 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4416
4417 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4418 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4419 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4420 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4421 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4422 } IEM_MC_ELSE() {
4423 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4424 } IEM_MC_ENDIF();
4425
4426 IEM_MC_ADVANCE_RIP_AND_FINISH();
4427 IEM_MC_END();
4428 }
4429 else
4430 {
4431 /* greg32, [mem64] */
4432 IEM_MC_BEGIN(3, 4);
4433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4434 IEM_MC_LOCAL(uint32_t, fMxcsr);
4435 IEM_MC_LOCAL(int32_t, i32Dst);
4436 IEM_MC_LOCAL(uint64_t, u64Src);
4437 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4438 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4439 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4440
4441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4443 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4444 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4445
4446 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4447 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4448 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4449 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4450 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4451 } IEM_MC_ELSE() {
4452 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4453 } IEM_MC_ENDIF();
4454
4455 IEM_MC_ADVANCE_RIP_AND_FINISH();
4456 IEM_MC_END();
4457 }
4458 }
4459}
4460
4461
4462/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4463FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4464{
4465 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4467 if (IEM_IS_MODRM_REG_MODE(bRm))
4468 {
4469 /*
4470 * Register, register.
4471 */
4472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4473
4474 IEM_MC_BEGIN(3, 1);
4475 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4476 IEM_MC_LOCAL(uint64_t, u64Dst);
4477 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4478 IEM_MC_ARG(uint64_t, u64Src, 2);
4479 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4480 IEM_MC_PREPARE_FPU_USAGE();
4481 IEM_MC_FPU_TO_MMX_MODE();
4482
4483 IEM_MC_REF_MXCSR(pfMxcsr);
4484 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4485
4486 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4487 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4488 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4489 } IEM_MC_ELSE() {
4490 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4491 } IEM_MC_ENDIF();
4492
4493 IEM_MC_ADVANCE_RIP_AND_FINISH();
4494 IEM_MC_END();
4495 }
4496 else
4497 {
4498 /*
4499 * Register, memory.
4500 */
4501 IEM_MC_BEGIN(3, 2);
4502 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4503 IEM_MC_LOCAL(uint64_t, u64Dst);
4504 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4505 IEM_MC_ARG(uint64_t, u64Src, 2);
4506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4507
4508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4510 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4511 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4512
4513 IEM_MC_PREPARE_FPU_USAGE();
4514 IEM_MC_FPU_TO_MMX_MODE();
4515 IEM_MC_REF_MXCSR(pfMxcsr);
4516
4517 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4518 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4519 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4520 } IEM_MC_ELSE() {
4521 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4522 } IEM_MC_ENDIF();
4523
4524 IEM_MC_ADVANCE_RIP_AND_FINISH();
4525 IEM_MC_END();
4526 }
4527}
4528
4529
4530/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4531FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4532{
4533 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4535 if (IEM_IS_MODRM_REG_MODE(bRm))
4536 {
4537 /*
4538 * Register, register.
4539 */
4540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4541
4542 IEM_MC_BEGIN(3, 1);
4543 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4544 IEM_MC_LOCAL(uint64_t, u64Dst);
4545 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4546 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4547 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4548 IEM_MC_PREPARE_FPU_USAGE();
4549 IEM_MC_FPU_TO_MMX_MODE();
4550
4551 IEM_MC_REF_MXCSR(pfMxcsr);
4552 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4553
4554 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4555 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4556 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4557 } IEM_MC_ELSE() {
4558 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4559 } IEM_MC_ENDIF();
4560
4561 IEM_MC_ADVANCE_RIP_AND_FINISH();
4562 IEM_MC_END();
4563 }
4564 else
4565 {
4566 /*
4567 * Register, memory.
4568 */
4569 IEM_MC_BEGIN(3, 3);
4570 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4571 IEM_MC_LOCAL(uint64_t, u64Dst);
4572 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4573 IEM_MC_LOCAL(X86XMMREG, uSrc);
4574 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4576
4577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4579 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4580 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4581
4582 IEM_MC_PREPARE_FPU_USAGE();
4583 IEM_MC_FPU_TO_MMX_MODE();
4584
4585 IEM_MC_REF_MXCSR(pfMxcsr);
4586
4587 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4588 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4589 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4590 } IEM_MC_ELSE() {
4591 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4592 } IEM_MC_ENDIF();
4593
4594 IEM_MC_ADVANCE_RIP_AND_FINISH();
4595 IEM_MC_END();
4596 }
4597}
4598
4599
4600/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4601FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4602{
4603 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4604
4605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4606 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4607 {
4608 if (IEM_IS_MODRM_REG_MODE(bRm))
4609 {
4610 /* greg64, XMM */
4611 IEM_MC_BEGIN(3, 2);
4612 IEM_MC_LOCAL(uint32_t, fMxcsr);
4613 IEM_MC_LOCAL(int64_t, i64Dst);
4614 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4615 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4616 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4617
4618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4619 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4620 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4621
4622 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4623 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4624 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4625 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4626 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4627 } IEM_MC_ELSE() {
4628 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4629 } IEM_MC_ENDIF();
4630
4631 IEM_MC_ADVANCE_RIP_AND_FINISH();
4632 IEM_MC_END();
4633 }
4634 else
4635 {
4636 /* greg64, [mem32] */
4637 IEM_MC_BEGIN(3, 4);
4638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4639 IEM_MC_LOCAL(uint32_t, fMxcsr);
4640 IEM_MC_LOCAL(int64_t, i64Dst);
4641 IEM_MC_LOCAL(uint32_t, u32Src);
4642 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4643 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4644 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4645
4646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4648 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4649 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4650
4651 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4652 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4653 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4654 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4655 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4656 } IEM_MC_ELSE() {
4657 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4658 } IEM_MC_ENDIF();
4659
4660 IEM_MC_ADVANCE_RIP_AND_FINISH();
4661 IEM_MC_END();
4662 }
4663 }
4664 else
4665 {
4666 if (IEM_IS_MODRM_REG_MODE(bRm))
4667 {
4668 /* greg32, XMM */
4669 IEM_MC_BEGIN(3, 2);
4670 IEM_MC_LOCAL(uint32_t, fMxcsr);
4671 IEM_MC_LOCAL(int32_t, i32Dst);
4672 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4673 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4674 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4675
4676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4677 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4678 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4679
4680 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4681 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4682 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4683 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4684 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4685 } IEM_MC_ELSE() {
4686 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4687 } IEM_MC_ENDIF();
4688
4689 IEM_MC_ADVANCE_RIP_AND_FINISH();
4690 IEM_MC_END();
4691 }
4692 else
4693 {
4694 /* greg32, [mem32] */
4695 IEM_MC_BEGIN(3, 4);
4696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4697 IEM_MC_LOCAL(uint32_t, fMxcsr);
4698 IEM_MC_LOCAL(int32_t, i32Dst);
4699 IEM_MC_LOCAL(uint32_t, u32Src);
4700 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4701 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4702 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4703
4704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4706 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4707 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4708
4709 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4710 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4711 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4712 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4713 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4714 } IEM_MC_ELSE() {
4715 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4716 } IEM_MC_ENDIF();
4717
4718 IEM_MC_ADVANCE_RIP_AND_FINISH();
4719 IEM_MC_END();
4720 }
4721 }
4722}
4723
4724
4725/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4726FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4727{
4728 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4729
4730 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4731 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4732 {
4733 if (IEM_IS_MODRM_REG_MODE(bRm))
4734 {
4735 /* greg64, XMM */
4736 IEM_MC_BEGIN(3, 2);
4737 IEM_MC_LOCAL(uint32_t, fMxcsr);
4738 IEM_MC_LOCAL(int64_t, i64Dst);
4739 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4740 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4741 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4742
4743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4744 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4745 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4746
4747 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4748 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4749 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4750 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4751 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4752 } IEM_MC_ELSE() {
4753 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4754 } IEM_MC_ENDIF();
4755
4756 IEM_MC_ADVANCE_RIP_AND_FINISH();
4757 IEM_MC_END();
4758 }
4759 else
4760 {
4761 /* greg64, [mem64] */
4762 IEM_MC_BEGIN(3, 4);
4763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4764 IEM_MC_LOCAL(uint32_t, fMxcsr);
4765 IEM_MC_LOCAL(int64_t, i64Dst);
4766 IEM_MC_LOCAL(uint64_t, u64Src);
4767 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4768 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4769 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4770
4771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4773 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4774 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4775
4776 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4777 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4778 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4779 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4780 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4781 } IEM_MC_ELSE() {
4782 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4783 } IEM_MC_ENDIF();
4784
4785 IEM_MC_ADVANCE_RIP_AND_FINISH();
4786 IEM_MC_END();
4787 }
4788 }
4789 else
4790 {
4791 if (IEM_IS_MODRM_REG_MODE(bRm))
4792 {
4793 /* greg32, XMM */
4794 IEM_MC_BEGIN(3, 2);
4795 IEM_MC_LOCAL(uint32_t, fMxcsr);
4796 IEM_MC_LOCAL(int32_t, i32Dst);
4797 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4798 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4799 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4800
4801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4802 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4803 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4804
4805 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4806 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4807 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4808 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4809 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4810 } IEM_MC_ELSE() {
4811 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4812 } IEM_MC_ENDIF();
4813
4814 IEM_MC_ADVANCE_RIP_AND_FINISH();
4815 IEM_MC_END();
4816 }
4817 else
4818 {
4819 /* greg32, [mem64] */
4820 IEM_MC_BEGIN(3, 4);
4821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4822 IEM_MC_LOCAL(uint32_t, fMxcsr);
4823 IEM_MC_LOCAL(int32_t, i32Dst);
4824 IEM_MC_LOCAL(uint64_t, u64Src);
4825 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4826 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4827 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4828
4829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4831 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4832 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4833
4834 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4835 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4836 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4837 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4838 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4839 } IEM_MC_ELSE() {
4840 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4841 } IEM_MC_ENDIF();
4842
4843 IEM_MC_ADVANCE_RIP_AND_FINISH();
4844 IEM_MC_END();
4845 }
4846 }
4847}
4848
4849
4850/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4851FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4852{
4853 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4855 if (IEM_IS_MODRM_REG_MODE(bRm))
4856 {
4857 /*
4858 * Register, register.
4859 */
4860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4861 IEM_MC_BEGIN(4, 1);
4862 IEM_MC_LOCAL(uint32_t, fEFlags);
4863 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4864 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4865 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4866 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4867 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4868 IEM_MC_PREPARE_SSE_USAGE();
4869 IEM_MC_FETCH_EFLAGS(fEFlags);
4870 IEM_MC_REF_MXCSR(pfMxcsr);
4871 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4872 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4873 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4874 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4875 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4876 } IEM_MC_ELSE() {
4877 IEM_MC_COMMIT_EFLAGS(fEFlags);
4878 } IEM_MC_ENDIF();
4879
4880 IEM_MC_ADVANCE_RIP_AND_FINISH();
4881 IEM_MC_END();
4882 }
4883 else
4884 {
4885 /*
4886 * Register, memory.
4887 */
4888 IEM_MC_BEGIN(4, 3);
4889 IEM_MC_LOCAL(uint32_t, fEFlags);
4890 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4891 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4892 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4893 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4894 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4896
4897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4899 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4900 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4901
4902 IEM_MC_PREPARE_SSE_USAGE();
4903 IEM_MC_FETCH_EFLAGS(fEFlags);
4904 IEM_MC_REF_MXCSR(pfMxcsr);
4905 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4906 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4907 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4908 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4909 } IEM_MC_ELSE() {
4910 IEM_MC_COMMIT_EFLAGS(fEFlags);
4911 } IEM_MC_ENDIF();
4912
4913 IEM_MC_ADVANCE_RIP_AND_FINISH();
4914 IEM_MC_END();
4915 }
4916}
4917
4918
4919/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4920FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4921{
4922 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4924 if (IEM_IS_MODRM_REG_MODE(bRm))
4925 {
4926 /*
4927 * Register, register.
4928 */
4929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4930 IEM_MC_BEGIN(4, 1);
4931 IEM_MC_LOCAL(uint32_t, fEFlags);
4932 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4933 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4934 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4935 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4936 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4937 IEM_MC_PREPARE_SSE_USAGE();
4938 IEM_MC_FETCH_EFLAGS(fEFlags);
4939 IEM_MC_REF_MXCSR(pfMxcsr);
4940 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4941 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4942 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4943 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4944 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4945 } IEM_MC_ELSE() {
4946 IEM_MC_COMMIT_EFLAGS(fEFlags);
4947 } IEM_MC_ENDIF();
4948
4949 IEM_MC_ADVANCE_RIP_AND_FINISH();
4950 IEM_MC_END();
4951 }
4952 else
4953 {
4954 /*
4955 * Register, memory.
4956 */
4957 IEM_MC_BEGIN(4, 3);
4958 IEM_MC_LOCAL(uint32_t, fEFlags);
4959 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4960 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4961 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4962 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4963 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4965
4966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4968 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4969 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4970
4971 IEM_MC_PREPARE_SSE_USAGE();
4972 IEM_MC_FETCH_EFLAGS(fEFlags);
4973 IEM_MC_REF_MXCSR(pfMxcsr);
4974 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4975 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4976 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4977 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4978 } IEM_MC_ELSE() {
4979 IEM_MC_COMMIT_EFLAGS(fEFlags);
4980 } IEM_MC_ENDIF();
4981
4982 IEM_MC_ADVANCE_RIP_AND_FINISH();
4983 IEM_MC_END();
4984 }
4985}
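
/*
 * ucomiss/ucomisd (and comiss/comisd below) set ZF, PF and CF from the
 * compare result and clear OF, SF and AF (per the SDM):
 *
 *      unordered:  ZF=1 PF=1 CF=1
 *      greater:    ZF=0 PF=0 CF=0
 *      less:       ZF=0 PF=0 CF=1
 *      equal:      ZF=1 PF=0 CF=0
 *
 * The unordered (ucom*) variants only signal #IA for SNaN operands, the
 * com* variants for any NaN.
 */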
4986
4987
4988/* Opcode 0xf3 0x0f 0x2e - invalid */
4989/* Opcode 0xf2 0x0f 0x2e - invalid */
4990
4991
4992/** Opcode 0x0f 0x2f - comiss Vss, Wss */
4993FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4994{
4995 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4996 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4997 if (IEM_IS_MODRM_REG_MODE(bRm))
4998 {
4999 /*
5000 * Register, register.
5001 */
5002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5003 IEM_MC_BEGIN(4, 1);
5004 IEM_MC_LOCAL(uint32_t, fEFlags);
5005 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5006 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5007 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5008 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5009 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5010 IEM_MC_PREPARE_SSE_USAGE();
5011 IEM_MC_FETCH_EFLAGS(fEFlags);
5012 IEM_MC_REF_MXCSR(pfMxcsr);
5013 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5014 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5015 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5016 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5017 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5018 } IEM_MC_ELSE() {
5019 IEM_MC_COMMIT_EFLAGS(fEFlags);
5020 } IEM_MC_ENDIF();
5021
5022 IEM_MC_ADVANCE_RIP_AND_FINISH();
5023 IEM_MC_END();
5024 }
5025 else
5026 {
5027 /*
5028 * Register, memory.
5029 */
5030 IEM_MC_BEGIN(4, 3);
5031 IEM_MC_LOCAL(uint32_t, fEFlags);
5032 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5033 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5034 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5035 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5036 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5038
5039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5041 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5042 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5043
5044 IEM_MC_PREPARE_SSE_USAGE();
5045 IEM_MC_FETCH_EFLAGS(fEFlags);
5046 IEM_MC_REF_MXCSR(pfMxcsr);
5047 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5048 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5049 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5050 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5051 } IEM_MC_ELSE() {
5052 IEM_MC_COMMIT_EFLAGS(fEFlags);
5053 } IEM_MC_ENDIF();
5054
5055 IEM_MC_ADVANCE_RIP_AND_FINISH();
5056 IEM_MC_END();
5057 }
5058}
5059
5060
5061/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5062FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5063{
5064 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5066 if (IEM_IS_MODRM_REG_MODE(bRm))
5067 {
5068 /*
5069 * Register, register.
5070 */
5071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5072 IEM_MC_BEGIN(4, 1);
5073 IEM_MC_LOCAL(uint32_t, fEFlags);
5074 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5075 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5076 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5077 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5078 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5079 IEM_MC_PREPARE_SSE_USAGE();
5080 IEM_MC_FETCH_EFLAGS(fEFlags);
5081 IEM_MC_REF_MXCSR(pfMxcsr);
5082 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5083 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5084 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5085 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5086 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5087 } IEM_MC_ELSE() {
5088 IEM_MC_COMMIT_EFLAGS(fEFlags);
5089 } IEM_MC_ENDIF();
5090
5091 IEM_MC_ADVANCE_RIP_AND_FINISH();
5092 IEM_MC_END();
5093 }
5094 else
5095 {
5096 /*
5097 * Register, memory.
5098 */
5099 IEM_MC_BEGIN(4, 3);
5100 IEM_MC_LOCAL(uint32_t, fEFlags);
5101 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5102 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5103 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5104 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5105 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5107
5108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5110 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5111 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5112
5113 IEM_MC_PREPARE_SSE_USAGE();
5114 IEM_MC_FETCH_EFLAGS(fEFlags);
5115 IEM_MC_REF_MXCSR(pfMxcsr);
5116 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5117 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5118 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5119 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5120 } IEM_MC_ELSE() {
5121 IEM_MC_COMMIT_EFLAGS(fEFlags);
5122 } IEM_MC_ENDIF();
5123
5124 IEM_MC_ADVANCE_RIP_AND_FINISH();
5125 IEM_MC_END();
5126 }
5127}
5128
5129
5130/* Opcode 0xf3 0x0f 0x2f - invalid */
5131/* Opcode 0xf2 0x0f 0x2f - invalid */
5132
5133/** Opcode 0x0f 0x30. */
5134FNIEMOP_DEF(iemOp_wrmsr)
5135{
5136 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5138 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
5139}
5140
5141
5142/** Opcode 0x0f 0x31. */
5143FNIEMOP_DEF(iemOp_rdtsc)
5144{
5145 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5147 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
5148}
5149
5150
5151/** Opcode 0x0f 0x32. */
5152FNIEMOP_DEF(iemOp_rdmsr)
5153{
5154 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5156 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
5157}
5158
5159
5160/** Opcode 0x0f 0x33. */
5161FNIEMOP_DEF(iemOp_rdpmc)
5162{
5163 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
5166}
5167
5168
5169/** Opcode 0x0f 0x34. */
5170FNIEMOP_DEF(iemOp_sysenter)
5171{
5172 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
5175}
5176
5177/** Opcode 0x0f 0x35. */
5178FNIEMOP_DEF(iemOp_sysexit)
5179{
5180 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5182 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5183}
5184
5185/** Opcode 0x0f 0x37. */
5186FNIEMOP_STUB(iemOp_getsec);
5187
5188
5189/** Opcode 0x0f 0x38. */
5190FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5191{
5192#ifdef IEM_WITH_THREE_0F_38
5193 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5194 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5195#else
5196 IEMOP_BITCH_ABOUT_STUB();
5197 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5198#endif
5199}
5200
5201
5202/** Opcode 0x0f 0x3a. */
5203FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5204{
5205#ifdef IEM_WITH_THREE_0F_3A
5206 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5207 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5208#else
5209 IEMOP_BITCH_ABOUT_STUB();
5210 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5211#endif
5212}
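
/*
 * Both three-byte tables are laid out with four entries per opcode byte,
 * presumably one per mandatory-prefix state (none, 0x66, 0xf3, 0xf2) as
 * selected by idxPrefix - hence the '* 4' scaling in the lookups above.
 */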
5213
5214
5215/**
5216 * Implements a conditional move.
5217 *
5218 * Wish there were an obvious way to do this where we could share and reduce
5219 * code bloat.
5220 *
5221 * @param a_Cnd The conditional "microcode" operation.
5222 */
5223#define CMOV_X(a_Cnd) \
5224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5225 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5226 { \
5227 switch (pVCpu->iem.s.enmEffOpSize) \
5228 { \
5229 case IEMMODE_16BIT: \
5230 IEM_MC_BEGIN(0, 1); \
5231 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5232 a_Cnd { \
5233 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5234 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5235 } IEM_MC_ENDIF(); \
5236 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5237 IEM_MC_END(); \
5238 break; \
5239 \
5240 case IEMMODE_32BIT: \
5241 IEM_MC_BEGIN(0, 1); \
5242 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5243 a_Cnd { \
5244 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5245 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5246 } IEM_MC_ELSE() { \
5247 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5248 } IEM_MC_ENDIF(); \
5249 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5250 IEM_MC_END(); \
5251 break; \
5252 \
5253 case IEMMODE_64BIT: \
5254 IEM_MC_BEGIN(0, 1); \
5255 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5256 a_Cnd { \
5257 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5258 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5259 } IEM_MC_ENDIF(); \
5260 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5261 IEM_MC_END(); \
5262 break; \
5263 \
5264 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5265 } \
5266 } \
5267 else \
5268 { \
5269 switch (pVCpu->iem.s.enmEffOpSize) \
5270 { \
5271 case IEMMODE_16BIT: \
5272 IEM_MC_BEGIN(0, 2); \
5273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5274 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5276 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5277 a_Cnd { \
5278 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5279 } IEM_MC_ENDIF(); \
5280 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5281 IEM_MC_END(); \
5282 break; \
5283 \
5284 case IEMMODE_32BIT: \
5285 IEM_MC_BEGIN(0, 2); \
5286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5287 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5289 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5290 a_Cnd { \
5291 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5292 } IEM_MC_ELSE() { \
5293 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5294 } IEM_MC_ENDIF(); \
5295 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5296 IEM_MC_END(); \
5297 break; \
5298 \
5299 case IEMMODE_64BIT: \
5300 IEM_MC_BEGIN(0, 2); \
5301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5302 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5304 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5305 a_Cnd { \
5306 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5307 } IEM_MC_ENDIF(); \
5308 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5309 IEM_MC_END(); \
5310 break; \
5311 \
5312 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5313 } \
5314 } do {} while (0)
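
/*
 * Note the asymmetry in the 32-bit cases of CMOV_X: in 64-bit mode a CMOVcc
 * with a 32-bit operand always writes its destination, zero-extending into
 * the high half even when the condition is false. Illustration:
 *
 *      mov rax, ~0
 *      cmp eax, eax            ; ZF=1
 *      cmovne eax, ecx         ; condition is false, yet rax[63:32] := 0
 *
 * The IEM_MC_CLEAR_HIGH_GREG_U64() in the IEM_MC_ELSE() arms models exactly
 * that.
 */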
5315
5316
5317
5318/** Opcode 0x0f 0x40. */
5319FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5320{
5321 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5322 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5323}
5324
5325
5326/** Opcode 0x0f 0x41. */
5327FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5328{
5329 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5330 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5331}
5332
5333
5334/** Opcode 0x0f 0x42. */
5335FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5336{
5337 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5338 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5339}
5340
5341
5342/** Opcode 0x0f 0x43. */
5343FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5344{
5345 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5346 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5347}
5348
5349
5350/** Opcode 0x0f 0x44. */
5351FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5352{
5353 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5354 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5355}
5356
5357
5358/** Opcode 0x0f 0x45. */
5359FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5360{
5361 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5362 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5363}
5364
5365
5366/** Opcode 0x0f 0x46. */
5367FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5368{
5369 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5370 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5371}
5372
5373
5374/** Opcode 0x0f 0x47. */
5375FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5376{
5377 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5378 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5379}
5380
5381
5382/** Opcode 0x0f 0x48. */
5383FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5384{
5385 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5386 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5387}
5388
5389
5390/** Opcode 0x0f 0x49. */
5391FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5392{
5393 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5394 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5395}
5396
5397
5398/** Opcode 0x0f 0x4a. */
5399FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5400{
5401 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5402 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5403}
5404
5405
5406/** Opcode 0x0f 0x4b. */
5407FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5408{
5409 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5410 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5411}
5412
5413
5414/** Opcode 0x0f 0x4c. */
5415FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5416{
5417 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5418 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5419}
5420
5421
5422/** Opcode 0x0f 0x4d. */
5423FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5424{
5425 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5426 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5427}
5428
5429
5430/** Opcode 0x0f 0x4e. */
5431FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5432{
5433 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5434 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5435}
5436
5437
5438/** Opcode 0x0f 0x4f. */
5439FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5440{
5441 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5442 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5443}
5444
5445#undef CMOV_X
5446
5447/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5448FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5449{
5450 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5452 if (IEM_IS_MODRM_REG_MODE(bRm))
5453 {
5454 /*
5455 * Register, register.
5456 */
5457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5458 IEM_MC_BEGIN(2, 1);
5459 IEM_MC_LOCAL(uint8_t, u8Dst);
5460 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5461 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5462 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5463 IEM_MC_PREPARE_SSE_USAGE();
5464 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5465 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
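        /* The helper gathers the sign bit of each of the four packed singles
           into bits 0..3 of u8Dst; the 32-bit store below zero-extends the
           mask into the full general register. */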
5466 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5467 IEM_MC_ADVANCE_RIP_AND_FINISH();
5468 IEM_MC_END();
5469 }
5470 /* No memory operand. */
5471 else
5472 return IEMOP_RAISE_INVALID_OPCODE();
5473}
5474
5475
5476/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5477FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5478{
5479 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5481 if (IEM_IS_MODRM_REG_MODE(bRm))
5482 {
5483 /*
5484 * Register, register.
5485 */
5486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5487 IEM_MC_BEGIN(2, 1);
5488 IEM_MC_LOCAL(uint8_t, u8Dst);
5489 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5490 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5491 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5492 IEM_MC_PREPARE_SSE_USAGE();
5493 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5494 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5495 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5496 IEM_MC_ADVANCE_RIP_AND_FINISH();
5497 IEM_MC_END();
5498 }
5499 /* No memory operand. */
5500 else
5501 return IEMOP_RAISE_INVALID_OPCODE();
5503}
5504
5505
5506/* Opcode 0xf3 0x0f 0x50 - invalid */
5507/* Opcode 0xf2 0x0f 0x50 - invalid */
5508
5509
5510/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5511FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5512{
5513 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5514 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5515}
5516
5517
5518/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5519FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5520{
5521 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5522 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5523}
5524
5525
5526/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5527FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5528{
5529 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5530 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5531}
5532
5533
5534/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5535FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5536{
5537 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5538 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5539}
5540
5541
5542/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5543FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5544{
5545 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5546 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5547}
5548
5549
5550/* Opcode 0x66 0x0f 0x52 - invalid */
5551
5552
5553/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5554FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5555{
5556 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5557 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5558}
5559
5560
5561/* Opcode 0xf2 0x0f 0x52 - invalid */
5562
5563/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5564FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5565/* Opcode 0x66 0x0f 0x53 - invalid */
5566/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5567FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5568/* Opcode 0xf2 0x0f 0x53 - invalid */
5569
5570
5571/** Opcode 0x0f 0x54 - andps Vps, Wps */
5572FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5573{
5574 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5575 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5576}
5577
5578
5579/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5580FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5581{
5582 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5583 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5584}
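/* Note: andps/andpd and the andn/or/xor forms below reuse the integer
   pand/pandn/por/pxor helpers, as the bitwise operation itself is
   type-agnostic; only the CPUID and exception checks differ between the
   SSE and SSE2 variants. */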
5585
5586
5587/* Opcode 0xf3 0x0f 0x54 - invalid */
5588/* Opcode 0xf2 0x0f 0x54 - invalid */
5589
5590
5591/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5592FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5593{
5594 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5595 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5596}
5597
5598
5599/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5600FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5601{
5602 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5603 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5604}
5605
5606
5607/* Opcode 0xf3 0x0f 0x55 - invalid */
5608/* Opcode 0xf2 0x0f 0x55 - invalid */
5609
5610
5611/** Opcode 0x0f 0x56 - orps Vps, Wps */
5612FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5613{
5614 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5615 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5616}
5617
5618
5619/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5620FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5621{
5622 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5623 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5624}
5625
5626
5627/* Opcode 0xf3 0x0f 0x56 - invalid */
5628/* Opcode 0xf2 0x0f 0x56 - invalid */
5629
5630
5631/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5632FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5633{
5634 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5635 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5636}
5637
5638
5639/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5640FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5641{
5642 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5643 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5644}
5645
5646
5647/* Opcode 0xf3 0x0f 0x57 - invalid */
5648/* Opcode 0xf2 0x0f 0x57 - invalid */
5649
5650/** Opcode 0x0f 0x58 - addps Vps, Wps */
5651FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5652{
5653 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5654 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5655}
5656
5657
5658/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5659FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5660{
5661 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5662 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5663}
5664
5665
5666/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5667FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5668{
5669 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5670 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5671}
5672
5673
5674/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5675FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5676{
5677 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5678 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5679}
5680
5681
5682/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5683FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5684{
5685 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5686 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5687}
5688
5689
5690/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5691FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5692{
5693 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5694 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5695}
5696
5697
5698/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5699FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5700{
5701 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5702 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5703}
5704
5705
5706/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5707FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5708{
5709 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5710 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5711}
5712
5713
5714/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5715FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5716{
5717 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5718 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5719}
5720
5721
5722/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5723FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5724{
5725 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5726 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5727}
5728
5729
5730/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5731FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5732{
5733 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5734 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5735}
5736
5737
5738/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5739FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5740{
5741 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5742 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5743}
5744
5745
5746/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5747FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5748{
5749 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5750 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5751}
5752
5753
5754/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5755FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5756{
5757 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5758 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5759}
5760
5761
5762/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5763FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5764{
5765 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5766 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5767}
5768
5769
5770/* Opcode 0xf2 0x0f 0x5b - invalid */
5771
5772
5773/** Opcode 0x0f 0x5c - subps Vps, Wps */
5774FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5775{
5776 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5777 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5778}
5779
5780
5781/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5782FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5783{
5784 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5785 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5786}
5787
5788
5789/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5790FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5791{
5792 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5793 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5794}
5795
5796
5797/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5798FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5799{
5800 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5801 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5802}
5803
5804
5805/** Opcode 0x0f 0x5d - minps Vps, Wps */
5806FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5807{
5808 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5809 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5810}
5811
5812
5813/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5814FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5815{
5816 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5817 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5818}
5819
5820
5821/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5822FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5823{
5824 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5825 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5826}
5827
5828
5829/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5830FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5831{
5832 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5833 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5834}
5835
5836
5837/** Opcode 0x0f 0x5e - divps Vps, Wps */
5838FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5839{
5840 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5841 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5842}
5843
5844
5845/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5846FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5847{
5848 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5849 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5850}
5851
5852
5853/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5854FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5855{
5856 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5857 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5858}
5859
5860
5861/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5862FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5863{
5864 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5865 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5866}
5867
5868
5869/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5870FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5871{
5872 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5873 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5874}
5875
5876
5877/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5878FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5879{
5880 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5881 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5882}
5883
5884
5885/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5886FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5887{
5888 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5889 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5890}
5891
5892
5893/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5894FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5895{
5896 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5897 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5898}
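/* Note: Architecturally the packed/scalar min/max operations return the second
   (source) operand whenever either input is a NaN, and likewise when comparing
   +0.0 with -0.0, so they are not commutative; the iemAImpl_* helpers are
   expected to follow that SDM behaviour. */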
5899
5900
5901/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5902FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5903{
5904 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5905 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5906}
5907
5908
5909/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5910FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5911{
5912 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5913 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5914}
5915
5916
5917/* Opcode 0xf3 0x0f 0x60 - invalid */
5918
5919
5920/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5921FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5922{
5923 /** @todo AMD marks the MMX version as 3DNow!; Intel says the MMX CPUID bit is required. */
5924 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5925 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5926}
5927
5928
5929/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5930FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5931{
5932 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5933 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5934}
5935
5936
5937/* Opcode 0xf3 0x0f 0x61 - invalid */
5938
5939
5940/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5941FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5942{
5943 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5944 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5945}
5946
5947
5948/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5949FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5950{
5951 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5952 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5953}
5954
5955
5956/* Opcode 0xf3 0x0f 0x62 - invalid */
5957
5958
5959
5960/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5961FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5962{
5963 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5964 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5965}
5966
5967
5968/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5969FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5970{
5971 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5972 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5973}
5974
5975
5976/* Opcode 0xf3 0x0f 0x63 - invalid */
5977
5978
5979/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5980FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5981{
5982 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5983 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5984}
5985
5986
5987/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5988FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5989{
5990 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5991 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5992}
5993
5994
5995/* Opcode 0xf3 0x0f 0x64 - invalid */
5996
5997
5998/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5999FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6000{
6001 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6002 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6003}
6004
6005
6006/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6007FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6008{
6009 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6010 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6011}
6012
6013
6014/* Opcode 0xf3 0x0f 0x65 - invalid */
6015
6016
6017/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6018FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6019{
6020 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6021 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6022}
6023
6024
6025/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6026FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6027{
6028 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6029 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6030}
6031
6032
6033/* Opcode 0xf3 0x0f 0x66 - invalid */
6034
6035
6036/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6037FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6038{
6039 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6040 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6041}
6042
6043
6044/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6045FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6046{
6047 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6048 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6049}
6050
6051
6052/* Opcode 0xf3 0x0f 0x67 - invalid */
6053
6054
6055/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6056 * @note Intel and AMD both use Qd for the second parameter; however, they
6057 * both list it as an mmX/mem64 operand and Intel describes it as being
6058 * loaded as a qword, so it should be Qq, shouldn't it? */
6059FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6060{
6061 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6062 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6063}
6064
6065
6066/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6067FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6068{
6069 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6070 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6071}
6072
6073
6074/* Opcode 0xf3 0x0f 0x68 - invalid */
6075
6076
6077/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6078 * @note Intel and AMD both use Qd for the second parameter; however, they
6079 * both list it as an mmX/mem64 operand and Intel describes it as being
6080 * loaded as a qword, so it should be Qq, shouldn't it? */
6081FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6082{
6083 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6084 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6085}
6086
6087
6088/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6089FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6090{
6091 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6092 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6094}
6095
6096
6097/* Opcode 0xf3 0x0f 0x69 - invalid */
6098
6099
6100/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6101 * @note Intel and AMD both use Qd for the second parameter; however, they
6102 * both list it as an mmX/mem64 operand and Intel describes it as being
6103 * loaded as a qword, so it should be Qq, shouldn't it? */
6104FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6105{
6106 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6107 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6108}
6109
6110
6111/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6112FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6113{
6114 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6115 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6116}
6117
6118
6119/* Opcode 0xf3 0x0f 0x6a - invalid */
6120
6121
6122/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6123FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6124{
6125 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6126 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6127}
6128
6129
6130/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6131FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6132{
6133 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6134 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6135}
6136
6137
6138/* Opcode 0xf3 0x0f 0x6b - invalid */
6139
6140
6141/* Opcode 0x0f 0x6c - invalid */
6142
6143
6144/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6145FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6146{
6147 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6148 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6149}
6150
6151
6152/* Opcode 0xf3 0x0f 0x6c - invalid */
6153/* Opcode 0xf2 0x0f 0x6c - invalid */
6154
6155
6156/* Opcode 0x0f 0x6d - invalid */
6157
6158
6159/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6160FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6161{
6162 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6163 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6164}
6165
6166
6167/* Opcode 0xf3 0x0f 0x6d - invalid */
6168
6169
6170FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6171{
6172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6173 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6174 {
6175 /**
6176 * @opcode 0x6e
6177 * @opcodesub rex.w=1
6178 * @oppfx none
6179 * @opcpuid mmx
6180 * @opgroup og_mmx_datamove
6181 * @opxcpttype 5
6182 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6183 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6184 */
6185 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6186 if (IEM_IS_MODRM_REG_MODE(bRm))
6187 {
6188 /* MMX, greg64 */
6189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6190 IEM_MC_BEGIN(0, 1);
6191 IEM_MC_LOCAL(uint64_t, u64Tmp);
6192
6193 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6194 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6195 IEM_MC_FPU_TO_MMX_MODE();
6196
6197 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6198 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6199
6200 IEM_MC_ADVANCE_RIP_AND_FINISH();
6201 IEM_MC_END();
6202 }
6203 else
6204 {
6205 /* MMX, [mem64] */
6206 IEM_MC_BEGIN(0, 2);
6207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6208 IEM_MC_LOCAL(uint64_t, u64Tmp);
6209
6210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6212 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6213 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6214 IEM_MC_FPU_TO_MMX_MODE();
6215
6216 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6217 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6218
6219 IEM_MC_ADVANCE_RIP_AND_FINISH();
6220 IEM_MC_END();
6221 }
6222 }
6223 else
6224 {
6225 /**
6226 * @opdone
6227 * @opcode 0x6e
6228 * @opcodesub rex.w=0
6229 * @oppfx none
6230 * @opcpuid mmx
6231 * @opgroup og_mmx_datamove
6232 * @opxcpttype 5
6233 * @opfunction iemOp_movd_q_Pd_Ey
6234 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6235 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6236 */
6237 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6238 if (IEM_IS_MODRM_REG_MODE(bRm))
6239 {
6240 /* MMX, greg32 */
6241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6242 IEM_MC_BEGIN(0, 1);
6243 IEM_MC_LOCAL(uint32_t, u32Tmp);
6244
6245 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6246 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6247 IEM_MC_FPU_TO_MMX_MODE();
6248
6249 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6250 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6251
6252 IEM_MC_ADVANCE_RIP_AND_FINISH();
6253 IEM_MC_END();
6254 }
6255 else
6256 {
6257 /* MMX, [mem32] */
6258 IEM_MC_BEGIN(0, 2);
6259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6260 IEM_MC_LOCAL(uint32_t, u32Tmp);
6261
6262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6264 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6265 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6266 IEM_MC_FPU_TO_MMX_MODE();
6267
6268 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6269 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6270
6271 IEM_MC_ADVANCE_RIP_AND_FINISH();
6272 IEM_MC_END();
6273 }
6274 }
6275}
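/* Note: Both forms above, like every MMX instruction, switch the x87 unit into
   MMX mode via IEM_MC_FPU_TO_MMX_MODE, which architecturally clears the TOS
   field and marks all register tags valid (hence the ftw=0xff in the tests). */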
6276
6277FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6278{
6279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6280 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6281 {
6282 /**
6283 * @opcode 0x6e
6284 * @opcodesub rex.w=1
6285 * @oppfx 0x66
6286 * @opcpuid sse2
6287 * @opgroup og_sse2_simdint_datamove
6288 * @opxcpttype 5
6289 * @optest 64-bit / op1=1 op2=2 -> op1=2
6290 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6291 */
6292 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6293 if (IEM_IS_MODRM_REG_MODE(bRm))
6294 {
6295 /* XMM, greg64 */
6296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6297 IEM_MC_BEGIN(0, 1);
6298 IEM_MC_LOCAL(uint64_t, u64Tmp);
6299
6300 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6301 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6302
6303 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6304 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6305
6306 IEM_MC_ADVANCE_RIP_AND_FINISH();
6307 IEM_MC_END();
6308 }
6309 else
6310 {
6311 /* XMM, [mem64] */
6312 IEM_MC_BEGIN(0, 2);
6313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6314 IEM_MC_LOCAL(uint64_t, u64Tmp);
6315
6316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6318 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6319 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6320
6321 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6322 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6323
6324 IEM_MC_ADVANCE_RIP_AND_FINISH();
6325 IEM_MC_END();
6326 }
6327 }
6328 else
6329 {
6330 /**
6331 * @opdone
6332 * @opcode 0x6e
6333 * @opcodesub rex.w=0
6334 * @oppfx 0x66
6335 * @opcpuid sse2
6336 * @opgroup og_sse2_simdint_datamove
6337 * @opxcpttype 5
6338 * @opfunction iemOp_movd_q_Vy_Ey
6339 * @optest op1=1 op2=2 -> op1=2
6340 * @optest op1=0 op2=-42 -> op1=-42
6341 */
6342 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6343 if (IEM_IS_MODRM_REG_MODE(bRm))
6344 {
6345 /* XMM, greg32 */
6346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6347 IEM_MC_BEGIN(0, 1);
6348 IEM_MC_LOCAL(uint32_t, u32Tmp);
6349
6350 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6351 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6352
6353 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6354 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6355
6356 IEM_MC_ADVANCE_RIP_AND_FINISH();
6357 IEM_MC_END();
6358 }
6359 else
6360 {
6361 /* XMM, [mem32] */
6362 IEM_MC_BEGIN(0, 2);
6363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6364 IEM_MC_LOCAL(uint32_t, u32Tmp);
6365
6366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6368 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6369 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6370
6371 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6372 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6373
6374 IEM_MC_ADVANCE_RIP_AND_FINISH();
6375 IEM_MC_END();
6376 }
6377 }
6378}
6379
6380/* Opcode 0xf3 0x0f 0x6e - invalid */
6381
6382
6383/**
6384 * @opcode 0x6f
6385 * @oppfx none
6386 * @opcpuid mmx
6387 * @opgroup og_mmx_datamove
6388 * @opxcpttype 5
6389 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6390 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6391 */
6392FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6393{
6394 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6396 if (IEM_IS_MODRM_REG_MODE(bRm))
6397 {
6398 /*
6399 * Register, register.
6400 */
6401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6402 IEM_MC_BEGIN(0, 1);
6403 IEM_MC_LOCAL(uint64_t, u64Tmp);
6404
6405 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6406 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6407 IEM_MC_FPU_TO_MMX_MODE();
6408
6409 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6410 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6411
6412 IEM_MC_ADVANCE_RIP_AND_FINISH();
6413 IEM_MC_END();
6414 }
6415 else
6416 {
6417 /*
6418 * Register, memory.
6419 */
6420 IEM_MC_BEGIN(0, 2);
6421 IEM_MC_LOCAL(uint64_t, u64Tmp);
6422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6423
6424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6426 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6427 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6428 IEM_MC_FPU_TO_MMX_MODE();
6429
6430 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6431 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6432
6433 IEM_MC_ADVANCE_RIP_AND_FINISH();
6434 IEM_MC_END();
6435 }
6436}
6437
6438/**
6439 * @opcode 0x6f
6440 * @oppfx 0x66
6441 * @opcpuid sse2
6442 * @opgroup og_sse2_simdint_datamove
6443 * @opxcpttype 1
6444 * @optest op1=1 op2=2 -> op1=2
6445 * @optest op1=0 op2=-42 -> op1=-42
6446 */
6447FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6448{
6449 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6451 if (IEM_IS_MODRM_REG_MODE(bRm))
6452 {
6453 /*
6454 * Register, register.
6455 */
6456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6457 IEM_MC_BEGIN(0, 0);
6458
6459 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6460 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6461
6462 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6463 IEM_GET_MODRM_RM(pVCpu, bRm));
6464 IEM_MC_ADVANCE_RIP_AND_FINISH();
6465 IEM_MC_END();
6466 }
6467 else
6468 {
6469 /*
6470 * Register, memory.
6471 */
6472 IEM_MC_BEGIN(0, 2);
6473 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6475
6476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6478 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6479 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6480
6481 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6482 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6483
6484 IEM_MC_ADVANCE_RIP_AND_FINISH();
6485 IEM_MC_END();
6486 }
6487}
6488
6489/**
6490 * @opcode 0x6f
6491 * @oppfx 0xf3
6492 * @opcpuid sse2
6493 * @opgroup og_sse2_simdint_datamove
6494 * @opxcpttype 4UA
6495 * @optest op1=1 op2=2 -> op1=2
6496 * @optest op1=0 op2=-42 -> op1=-42
6497 */
6498FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6499{
6500 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6502 if (IEM_IS_MODRM_REG_MODE(bRm))
6503 {
6504 /*
6505 * Register, register.
6506 */
6507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6508 IEM_MC_BEGIN(0, 0);
6509 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6510 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6511 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6512 IEM_GET_MODRM_RM(pVCpu, bRm));
6513 IEM_MC_ADVANCE_RIP_AND_FINISH();
6514 IEM_MC_END();
6515 }
6516 else
6517 {
6518 /*
6519 * Register, memory.
6520 */
6521 IEM_MC_BEGIN(0, 2);
6522 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6524
6525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6527 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6528 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6529 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6530 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6531
6532 IEM_MC_ADVANCE_RIP_AND_FINISH();
6533 IEM_MC_END();
6534 }
6535}
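/* Note: The only difference from movdqa above is the memory fetch:
   IEM_MC_FETCH_MEM_U128 here performs no alignment check, whereas movdqa's
   IEM_MC_FETCH_MEM_U128_ALIGN_SSE raises #GP(0) on a misaligned 16-byte
   access. */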
6536
6537
6538/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6539FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6540{
6541 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6543 if (IEM_IS_MODRM_REG_MODE(bRm))
6544 {
6545 /*
6546 * Register, register.
6547 */
6548 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6550
6551 IEM_MC_BEGIN(3, 0);
6552 IEM_MC_ARG(uint64_t *, pDst, 0);
6553 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6554 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6555 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6556 IEM_MC_PREPARE_FPU_USAGE();
6557 IEM_MC_FPU_TO_MMX_MODE();
6558
6559 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6560 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6561 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6562 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6563
6564 IEM_MC_ADVANCE_RIP_AND_FINISH();
6565 IEM_MC_END();
6566 }
6567 else
6568 {
6569 /*
6570 * Register, memory.
6571 */
6572 IEM_MC_BEGIN(3, 2);
6573 IEM_MC_ARG(uint64_t *, pDst, 0);
6574 IEM_MC_LOCAL(uint64_t, uSrc);
6575 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6577
6578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
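        /* The '1' tells the effective-address calculation that one immediate
           byte follows the ModR/M encoding, which is needed to get 64-bit
           RIP-relative addressing right. */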
6579 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6580 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6582 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6583 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6584
6585 IEM_MC_PREPARE_FPU_USAGE();
6586 IEM_MC_FPU_TO_MMX_MODE();
6587
6588 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6589 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6590 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6591
6592 IEM_MC_ADVANCE_RIP_AND_FINISH();
6593 IEM_MC_END();
6594 }
6595}
6596
6597
6598/**
6599 * Common worker for SSE2 instructions on the forms:
6600 * pshufd xmm1, xmm2/mem128, imm8
6601 * pshufhw xmm1, xmm2/mem128, imm8
6602 * pshuflw xmm1, xmm2/mem128, imm8
6603 *
6604 * Proper alignment of the 128-bit operand is enforced.
6605 * Exceptions type 4. SSE2 cpuid checks.
6606 */
6607FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6608{
6609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6610 if (IEM_IS_MODRM_REG_MODE(bRm))
6611 {
6612 /*
6613 * Register, register.
6614 */
6615 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6617
6618 IEM_MC_BEGIN(3, 0);
6619 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6620 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6621 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6622 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6623 IEM_MC_PREPARE_SSE_USAGE();
6624 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6625 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6626 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6627 IEM_MC_ADVANCE_RIP_AND_FINISH();
6628 IEM_MC_END();
6629 }
6630 else
6631 {
6632 /*
6633 * Register, memory.
6634 */
6635 IEM_MC_BEGIN(3, 2);
6636 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6637 IEM_MC_LOCAL(RTUINT128U, uSrc);
6638 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6640
6641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6642 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6643 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6645 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6646
6647 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6648 IEM_MC_PREPARE_SSE_USAGE();
6649 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6650 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6651
6652 IEM_MC_ADVANCE_RIP_AND_FINISH();
6653 IEM_MC_END();
6654 }
6655}
6656
6657
6658/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6659FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6660{
6661 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6662 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6663}
6664
6665
6666/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6667FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6668{
6669 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6670 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6671}
6672
6673
6674/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6675FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6676{
6677 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6678 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6679}
6680
6681
6682/**
6683 * Common worker for MMX instructions of the form:
6684 * psrlw mm, imm8
6685 * psraw mm, imm8
6686 * psllw mm, imm8
6687 * psrld mm, imm8
6688 * psrad mm, imm8
6689 * pslld mm, imm8
6690 * psrlq mm, imm8
6691 * psllq mm, imm8
6692 *
6693 */
6694FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6695{
6696 if (IEM_IS_MODRM_REG_MODE(bRm))
6697 {
6698 /*
6699 * Register, immediate.
6700 */
6701 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6703
6704 IEM_MC_BEGIN(2, 0);
6705 IEM_MC_ARG(uint64_t *, pDst, 0);
6706 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6707 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6708 IEM_MC_PREPARE_FPU_USAGE();
6709 IEM_MC_FPU_TO_MMX_MODE();
6710
6711 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6712 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6713 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6714
6715 IEM_MC_ADVANCE_RIP_AND_FINISH();
6716 IEM_MC_END();
6717 }
6718 else
6719 {
6720 /*
6721 * Register, memory not supported.
6722 */
6723 /// @todo Caller already enforced register mode?!
6724 AssertFailedReturn(VINF_SUCCESS);
6725 }
6726}
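/* Note: This worker and the SSE2 variant below are only ever reached for the
   register encoding: the Group 12/13/14 decoders route memory encodings to
   iemOp_InvalidWithRMNeedImm8 instead, which is why the else branch simply
   asserts. */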
6727
6728
6729/**
6730 * Common worker for SSE2 instructions of the form:
6731 * psrlw xmm, imm8
6732 * psraw xmm, imm8
6733 * psllw xmm, imm8
6734 * psrld xmm, imm8
6735 * psrad xmm, imm8
6736 * pslld xmm, imm8
6737 * psrlq xmm, imm8
6738 * psllq xmm, imm8
6739 *
6740 */
6741FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6742{
6743 if (IEM_IS_MODRM_REG_MODE(bRm))
6744 {
6745 /*
6746 * Register, immediate.
6747 */
6748 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6750
6751 IEM_MC_BEGIN(2, 0);
6752 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6753 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6754 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6755 IEM_MC_PREPARE_SSE_USAGE();
6756 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6757 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6758 IEM_MC_ADVANCE_RIP_AND_FINISH();
6759 IEM_MC_END();
6760 }
6761 else
6762 {
6763 /*
6764 * Register, memory not supported.
6765 */
6766 /// @todo Caller already enforced register mode?!
6767 AssertFailedReturn(VINF_SUCCESS);
6768 }
6769}
6770
6771
6772/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6773FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6774{
6775// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6776 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6777}
6778
6779
6780/** Opcode 0x66 0x0f 0x71 11/2. */
6781FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6782{
6783// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6784 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6785}
6786
6787
6788/** Opcode 0x0f 0x71 11/4. */
6789FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6790{
6791// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6792 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6793}
6794
6795
6796/** Opcode 0x66 0x0f 0x71 11/4. */
6797FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6798{
6799// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6800 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6801}
6802
6803
6804/** Opcode 0x0f 0x71 11/6. */
6805FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6806{
6807// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6808 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6809}
6810
6811
6812/** Opcode 0x66 0x0f 0x71 11/6. */
6813FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6814{
6815// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6816 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6817}
6818
6819
6820/**
6821 * Group 12 jump table for register variant.
6822 */
6823IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6824{
6825 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6826 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6827 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6828 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6829 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6830 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6831 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6832 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6833};
6834AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
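/* The table is indexed by (modrm.reg * 4) + pVCpu->iem.s.idxPrefix; the four
   columns per /r row correspond to the operand prefix in effect (none, 0x66,
   0xf3, 0xf2 in idxPrefix order). */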
6835
6836
6837/** Opcode 0x0f 0x71. */
6838FNIEMOP_DEF(iemOp_Grp12)
6839{
6840 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6841 if (IEM_IS_MODRM_REG_MODE(bRm))
6842 /* register, register */
6843 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6844 + pVCpu->iem.s.idxPrefix], bRm);
6845 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6846}
6847
6848
6849/** Opcode 0x0f 0x72 11/2. */
6850FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6851{
6852// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6853 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6854}
6855
6856
6857/** Opcode 0x66 0x0f 0x72 11/2. */
6858FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6859{
6860// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6861 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6862}
6863
6864
6865/** Opcode 0x0f 0x72 11/4. */
6866FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6867{
6868// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6869 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6870}
6871
6872
6873/** Opcode 0x66 0x0f 0x72 11/4. */
6874FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6875{
6876// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6877 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6878}
6879
6880
6881/** Opcode 0x0f 0x72 11/6. */
6882FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6883{
6884// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6885 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6886}
6887
6888/** Opcode 0x66 0x0f 0x72 11/6. */
6889FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6890{
6891// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6892 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6893}
6894
6895
6896/**
6897 * Group 13 jump table for register variant.
6898 */
6899IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6900{
6901 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6902 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6903 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6904 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6905 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6906 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6907 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6908 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6909};
6910AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6911
6912/** Opcode 0x0f 0x72. */
6913FNIEMOP_DEF(iemOp_Grp13)
6914{
6915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6916 if (IEM_IS_MODRM_REG_MODE(bRm))
6917 /* register, register */
6918 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6919 + pVCpu->iem.s.idxPrefix], bRm);
6920 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6921}
6922
6923
6924/** Opcode 0x0f 0x73 11/2. */
6925FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6926{
6927// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6928 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6929}
6930
6931
6932/** Opcode 0x66 0x0f 0x73 11/2. */
6933FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6934{
6935// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6936 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6937}
6938
6939
6940/** Opcode 0x66 0x0f 0x73 11/3. */
6941FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6942{
6943// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6944 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6945}
6946
6947
6948/** Opcode 0x0f 0x73 11/6. */
6949FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6950{
6951// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6952 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6953}
6954
6955
6956/** Opcode 0x66 0x0f 0x73 11/6. */
6957FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6958{
6959// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6960 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6961}
6962
6963
6964/** Opcode 0x66 0x0f 0x73 11/7. */
6965FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6966{
6967// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6968 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6969}
6970
6971/**
6972 * Group 14 jump table for register variant.
6973 */
6974IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6975{
6976 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6977 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6978 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6979 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6980 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6981 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6982 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6983 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6984};
6985AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6986
6987
6988/** Opcode 0x0f 0x73. */
6989FNIEMOP_DEF(iemOp_Grp14)
6990{
6991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6992 if (IEM_IS_MODRM_REG_MODE(bRm))
6993 /* register, register */
6994 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6995 + pVCpu->iem.s.idxPrefix], bRm);
6996 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6997}
6998
6999
7000/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7001FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7002{
7003 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7004 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7005}
7006
7007
7008/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7009FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7010{
7011 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7012 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7013}
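/* Note: The pcmpeqX (and pcmpgtX above) helpers write all-ones per element on a
   match and all-zeroes otherwise, so the result can be used directly as a
   select mask. */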
7014
7015
7016/* Opcode 0xf3 0x0f 0x74 - invalid */
7017/* Opcode 0xf2 0x0f 0x74 - invalid */
7018
7019
7020/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7021FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7022{
7023 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7024 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7025}
7026
7027
7028/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7029FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7030{
7031 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7032 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7033}
7034
7035
7036/* Opcode 0xf3 0x0f 0x75 - invalid */
7037/* Opcode 0xf2 0x0f 0x75 - invalid */
7038
7039
7040/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7041FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7042{
7043 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7044 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7045}
7046
7047
7048/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7049FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7050{
7051 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7052 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7053}
7054
7055
7056/* Opcode 0xf3 0x0f 0x76 - invalid */
7057/* Opcode 0xf2 0x0f 0x76 - invalid */
7058
7059
7060/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7061FNIEMOP_DEF(iemOp_emms)
7062{
7063 IEMOP_MNEMONIC(emms, "emms");
7064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7065
7066 IEM_MC_BEGIN(0, 0);
7067 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7068 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7069 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7070 IEM_MC_FPU_FROM_MMX_MODE();
7071 IEM_MC_ADVANCE_RIP_AND_FINISH();
7072 IEM_MC_END();
7073}
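/* Note: EMMS is the counterpart of the implicit FPU-to-MMX transition performed
   by the MMX instructions: IEM_MC_FPU_FROM_MMX_MODE marks all x87 tag bits
   empty again so the register stack is usable by FPU code. */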
7074
7075/* Opcode 0x66 0x0f 0x77 - invalid */
7076/* Opcode 0xf3 0x0f 0x77 - invalid */
7077/* Opcode 0xf2 0x0f 0x77 - invalid */
7078
7079/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7080#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7081FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7082{
7083 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7084 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7085 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7086 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7087
7088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7089 if (IEM_IS_MODRM_REG_MODE(bRm))
7090 {
7091 /*
7092 * Register, register.
7093 */
7094 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7095 if (enmEffOpSize == IEMMODE_64BIT)
7096 {
7097 IEM_MC_BEGIN(2, 0);
7098 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7099 IEM_MC_ARG(uint64_t, u64Enc, 1);
7100 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7101 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7102 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7103 IEM_MC_END();
7104 }
7105 else
7106 {
7107 IEM_MC_BEGIN(2, 0);
7108 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7109 IEM_MC_ARG(uint32_t, u32Enc, 1);
7110 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7111 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7112 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu64Dst, u32Enc);
7113 IEM_MC_END();
7114 }
7115 }
7116 else
7117 {
7118 /*
7119 * Memory, register.
7120 */
7121 if (enmEffOpSize == IEMMODE_64BIT)
7122 {
7123 IEM_MC_BEGIN(3, 0);
7124 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7125 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7126 IEM_MC_ARG(uint64_t, u64Enc, 2);
7127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7128 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7129 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7130 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7131 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7132 IEM_MC_END();
7133 }
7134 else
7135 {
7136 IEM_MC_BEGIN(3, 0);
7137 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7138 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7139 IEM_MC_ARG(uint32_t, u32Enc, 2);
7140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7141 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7142 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7143 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7144 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7145 IEM_MC_END();
7146 }
7147 }
7148}
7149#else
7150FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7151#endif
7152
7153/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7154FNIEMOP_STUB(iemOp_AmdGrp17);
7155/* Opcode 0xf3 0x0f 0x78 - invalid */
7156/* Opcode 0xf2 0x0f 0x78 - invalid */
7157
7158/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7159#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7160FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7161{
7162 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7163 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7164 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7165 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7166
7167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7168 if (IEM_IS_MODRM_REG_MODE(bRm))
7169 {
7170 /*
7171 * Register, register.
7172 */
7173 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7174 if (enmEffOpSize == IEMMODE_64BIT)
7175 {
7176 IEM_MC_BEGIN(2, 0);
7177 IEM_MC_ARG(uint64_t, u64Val, 0);
7178 IEM_MC_ARG(uint64_t, u64Enc, 1);
7179 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7180 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7181 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
7182 IEM_MC_END();
7183 }
7184 else
7185 {
7186 IEM_MC_BEGIN(2, 0);
7187 IEM_MC_ARG(uint32_t, u32Val, 0);
7188 IEM_MC_ARG(uint32_t, u32Enc, 1);
7189 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7190 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7191 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
7192 IEM_MC_END();
7193 }
7194 }
7195 else
7196 {
7197 /*
7198 * Register, memory.
7199 */
7200 if (enmEffOpSize == IEMMODE_64BIT)
7201 {
7202 IEM_MC_BEGIN(3, 0);
7203 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7204 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7205 IEM_MC_ARG(uint64_t, u64Enc, 2);
7206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7207 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7208 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7209 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7210 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7211 IEM_MC_END();
7212 }
7213 else
7214 {
7215 IEM_MC_BEGIN(3, 0);
7216 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7217 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7218 IEM_MC_ARG(uint32_t, u32Enc, 2);
7219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7220 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7221 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7222 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7223 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7224 IEM_MC_END();
7225 }
7226 }
7227}
7228#else
7229FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7230#endif
7231/* Opcode 0x66 0x0f 0x79 - invalid */
7232/* Opcode 0xf3 0x0f 0x79 - invalid */
7233/* Opcode 0xf2 0x0f 0x79 - invalid */
7234
7235/* Opcode 0x0f 0x7a - invalid */
7236/* Opcode 0x66 0x0f 0x7a - invalid */
7237/* Opcode 0xf3 0x0f 0x7a - invalid */
7238/* Opcode 0xf2 0x0f 0x7a - invalid */
7239
7240/* Opcode 0x0f 0x7b - invalid */
7241/* Opcode 0x66 0x0f 0x7b - invalid */
7242/* Opcode 0xf3 0x0f 0x7b - invalid */
7243/* Opcode 0xf2 0x0f 0x7b - invalid */
7244
7245/* Opcode 0x0f 0x7c - invalid */
7246
7247
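/*
 * Note on the HADD/HSUB group (0x7c/0x7d): these do horizontal (pairwise)
 * adds/subs of adjacent elements, with the low half of the result taken from
 * the destination operand and the high half from the source, e.g. for haddpd:
 *      dst[0] = dst[0] + dst[1];  dst[1] = src[0] + src[1]
 */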
7248/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7249FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7250{
7251 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7252 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7253}
7254
7255
7256/* Opcode 0xf3 0x0f 0x7c - invalid */
7257
7258
7259/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7260FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7261{
7262 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7263 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7264}
7265
7266
7267/* Opcode 0x0f 0x7d - invalid */
7268
7269
7270/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7271FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7272{
7273 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7274 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7275}
7276
7277
7278/* Opcode 0xf3 0x0f 0x7d - invalid */
7279
7280
7281/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7282FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7283{
7284 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7285 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7286}
7287
7288
7289/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7290FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7291{
7292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
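 /* REX.W selects between the two forms documented below: with REX.W this is
    the 64-bit MOVQ store, without it the 32-bit MOVD store. */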
7293 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7294 {
7295 /**
7296 * @opcode 0x7e
7297 * @opcodesub rex.w=1
7298 * @oppfx none
7299 * @opcpuid mmx
7300 * @opgroup og_mmx_datamove
7301 * @opxcpttype 5
7302 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7303 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7304 */
7305 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7306 if (IEM_IS_MODRM_REG_MODE(bRm))
7307 {
7308 /* greg64, MMX */
7309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7310 IEM_MC_BEGIN(0, 1);
7311 IEM_MC_LOCAL(uint64_t, u64Tmp);
7312
7313 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7314 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7315 IEM_MC_FPU_TO_MMX_MODE();
7316
7317 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7318 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7319
7320 IEM_MC_ADVANCE_RIP_AND_FINISH();
7321 IEM_MC_END();
7322 }
7323 else
7324 {
7325 /* [mem64], MMX */
7326 IEM_MC_BEGIN(0, 2);
7327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7328 IEM_MC_LOCAL(uint64_t, u64Tmp);
7329
7330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7332 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7333 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7334 IEM_MC_FPU_TO_MMX_MODE();
7335
7336 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7337 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7338
7339 IEM_MC_ADVANCE_RIP_AND_FINISH();
7340 IEM_MC_END();
7341 }
7342 }
7343 else
7344 {
7345 /**
7346 * @opdone
7347 * @opcode 0x7e
7348 * @opcodesub rex.w=0
7349 * @oppfx none
7350 * @opcpuid mmx
7351 * @opgroup og_mmx_datamove
7352 * @opxcpttype 5
7353 * @opfunction iemOp_movd_q_Ey_Pd
7354 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7355 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7356 */
7357 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7358 if (IEM_IS_MODRM_REG_MODE(bRm))
7359 {
7360 /* greg32, MMX */
7361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7362 IEM_MC_BEGIN(0, 1);
7363 IEM_MC_LOCAL(uint32_t, u32Tmp);
7364
7365 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7366 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7367 IEM_MC_FPU_TO_MMX_MODE();
7368
7369 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7370 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7371
7372 IEM_MC_ADVANCE_RIP_AND_FINISH();
7373 IEM_MC_END();
7374 }
7375 else
7376 {
7377 /* [mem32], MMX */
7378 IEM_MC_BEGIN(0, 2);
7379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7380 IEM_MC_LOCAL(uint32_t, u32Tmp);
7381
7382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7384 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7385 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7386 IEM_MC_FPU_TO_MMX_MODE();
7387
7388 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7389 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7390
7391 IEM_MC_ADVANCE_RIP_AND_FINISH();
7392 IEM_MC_END();
7393 }
7394 }
7395}
7396
7397
7398FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7399{
7400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7401 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7402 {
7403 /**
7404 * @opcode 0x7e
7405 * @opcodesub rex.w=1
7406 * @oppfx 0x66
7407 * @opcpuid sse2
7408 * @opgroup og_sse2_simdint_datamove
7409 * @opxcpttype 5
7410 * @optest 64-bit / op1=1 op2=2 -> op1=2
7411 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7412 */
7413 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7414 if (IEM_IS_MODRM_REG_MODE(bRm))
7415 {
7416 /* greg64, XMM */
7417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7418 IEM_MC_BEGIN(0, 1);
7419 IEM_MC_LOCAL(uint64_t, u64Tmp);
7420
7421 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7422 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7423
7424 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7425 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7426
7427 IEM_MC_ADVANCE_RIP_AND_FINISH();
7428 IEM_MC_END();
7429 }
7430 else
7431 {
7432 /* [mem64], XMM */
7433 IEM_MC_BEGIN(0, 2);
7434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7435 IEM_MC_LOCAL(uint64_t, u64Tmp);
7436
7437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7439 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7440 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7441
7442 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7443 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7444
7445 IEM_MC_ADVANCE_RIP_AND_FINISH();
7446 IEM_MC_END();
7447 }
7448 }
7449 else
7450 {
7451 /**
7452 * @opdone
7453 * @opcode 0x7e
7454 * @opcodesub rex.w=0
7455 * @oppfx 0x66
7456 * @opcpuid sse2
7457 * @opgroup og_sse2_simdint_datamove
7458 * @opxcpttype 5
7459 * @opfunction iemOp_movd_q_Ey_Vy
7460 * @optest op1=1 op2=2 -> op1=2
7461 * @optest op1=0 op2=-42 -> op1=-42
7462 */
7463 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7464 if (IEM_IS_MODRM_REG_MODE(bRm))
7465 {
7466 /* greg32, XMM */
7467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7468 IEM_MC_BEGIN(0, 1);
7469 IEM_MC_LOCAL(uint32_t, u32Tmp);
7470
7471 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7472 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7473
7474 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7475 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7476
7477 IEM_MC_ADVANCE_RIP_AND_FINISH();
7478 IEM_MC_END();
7479 }
7480 else
7481 {
7482 /* [mem32], XMM */
7483 IEM_MC_BEGIN(0, 2);
7484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7485 IEM_MC_LOCAL(uint32_t, u32Tmp);
7486
7487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7489 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7490 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7491
7492 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7493 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7494
7495 IEM_MC_ADVANCE_RIP_AND_FINISH();
7496 IEM_MC_END();
7497 }
7498 }
7499}
7500
7501/**
7502 * @opcode 0x7e
7503 * @oppfx 0xf3
7504 * @opcpuid sse2
7505 * @opgroup og_sse2_pcksclr_datamove
7506 * @opxcpttype none
7507 * @optest op1=1 op2=2 -> op1=2
7508 * @optest op1=0 op2=-42 -> op1=-42
7509 */
7510FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7511{
7512 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7514 if (IEM_IS_MODRM_REG_MODE(bRm))
7515 {
7516 /*
7517 * XMM128, XMM64.
7518 */
7519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7520 IEM_MC_BEGIN(0, 2);
7521 IEM_MC_LOCAL(uint64_t, uSrc);
7522
7523 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7524 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7525
7526 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7527 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7528
7529 IEM_MC_ADVANCE_RIP_AND_FINISH();
7530 IEM_MC_END();
7531 }
7532 else
7533 {
7534 /*
7535 * XMM128, [mem64].
7536 */
7537 IEM_MC_BEGIN(0, 2);
7538 IEM_MC_LOCAL(uint64_t, uSrc);
7539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7540
7541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7543 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7544 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7545
7546 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7547 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7548
7549 IEM_MC_ADVANCE_RIP_AND_FINISH();
7550 IEM_MC_END();
7551 }
7552}
7553
7554/* Opcode 0xf2 0x0f 0x7e - invalid */
7555
7556
7557/** Opcode 0x0f 0x7f - movq Qq, Pq */
7558FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7559{
7560 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7562 if (IEM_IS_MODRM_REG_MODE(bRm))
7563 {
7564 /*
7565 * MMX, MMX.
7566 */
7567 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7568 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7570 IEM_MC_BEGIN(0, 1);
7571 IEM_MC_LOCAL(uint64_t, u64Tmp);
7572 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7573 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7574 IEM_MC_FPU_TO_MMX_MODE();
7575
7576 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7577 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7578
7579 IEM_MC_ADVANCE_RIP_AND_FINISH();
7580 IEM_MC_END();
7581 }
7582 else
7583 {
7584 /*
7585 * [mem64], MMX.
7586 */
7587 IEM_MC_BEGIN(0, 2);
7588 IEM_MC_LOCAL(uint64_t, u64Tmp);
7589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7590
7591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7593 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7594 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7595 IEM_MC_FPU_TO_MMX_MODE();
7596
7597 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7598 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7599
7600 IEM_MC_ADVANCE_RIP_AND_FINISH();
7601 IEM_MC_END();
7602 }
7603}
7604
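/*
 * Note: the movdqa and movdqu workers below differ only in the memory store
 * used - movdqa goes through the alignment-checking
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE (misaligned operands fault), while movdqu
 * uses the plain IEM_MC_STORE_MEM_U128.  The register-register forms are
 * identical.
 */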
7605/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7606FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7607{
7608 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7610 if (IEM_IS_MODRM_REG_MODE(bRm))
7611 {
7612 /*
7613 * XMM, XMM.
7614 */
7615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7616 IEM_MC_BEGIN(0, 0);
7617 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7618 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7619 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7620 IEM_GET_MODRM_REG(pVCpu, bRm));
7621 IEM_MC_ADVANCE_RIP_AND_FINISH();
7622 IEM_MC_END();
7623 }
7624 else
7625 {
7626 /*
7627 * [mem128], XMM.
7628 */
7629 IEM_MC_BEGIN(0, 2);
7630 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7632
7633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7635 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7636 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7637
7638 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7639 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7640
7641 IEM_MC_ADVANCE_RIP_AND_FINISH();
7642 IEM_MC_END();
7643 }
7644}
7645
7646/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7647FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7648{
7649 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7651 if (IEM_IS_MODRM_REG_MODE(bRm))
7652 {
7653 /*
7654 * XMM, XMM.
7655 */
7656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7657 IEM_MC_BEGIN(0, 0);
7658 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7659 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7660 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7661 IEM_GET_MODRM_REG(pVCpu, bRm));
7662 IEM_MC_ADVANCE_RIP_AND_FINISH();
7663 IEM_MC_END();
7664 }
7665 else
7666 {
7667 /*
7668 * [mem128], XMM.
7669 */
7670 IEM_MC_BEGIN(0, 2);
7671 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7673
7674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7676 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7678
7679 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7680 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7681
7682 IEM_MC_ADVANCE_RIP_AND_FINISH();
7683 IEM_MC_END();
7684 }
7685}
7686
7687/* Opcode 0xf2 0x0f 0x7f - invalid */
7688
7689
7690
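/*
 * The Jcc Jv handlers below (0x0f 0x80..0x8f) all follow the same pattern;
 * only the EFLAGS condition tested differs:
 *      jo/jno:  OF             jc/jnc:  CF             je/jne:  ZF
 *      jbe/ja:  CF | ZF        js/jns:  SF             jp/jnp:  PF
 *      jl/jge:  SF != OF       jle/jg:  ZF || SF != OF
 * E.g. jl (0x8c) is taken when SF and OF disagree, as after a signed compare
 * where the first operand is the smaller one.
 */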
7691/** Opcode 0x0f 0x80. */
7692FNIEMOP_DEF(iemOp_jo_Jv)
7693{
7694 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7695 IEMOP_HLP_MIN_386();
7696 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7697 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7698 {
7699 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7701
7702 IEM_MC_BEGIN(0, 0);
7703 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7704 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7705 } IEM_MC_ELSE() {
7706 IEM_MC_ADVANCE_RIP_AND_FINISH();
7707 } IEM_MC_ENDIF();
7708 IEM_MC_END();
7709 }
7710 else
7711 {
7712 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7714
7715 IEM_MC_BEGIN(0, 0);
7716 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7717 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7718 } IEM_MC_ELSE() {
7719 IEM_MC_ADVANCE_RIP_AND_FINISH();
7720 } IEM_MC_ENDIF();
7721 IEM_MC_END();
7722 }
7723}
7724
7725
7726/** Opcode 0x0f 0x81. */
7727FNIEMOP_DEF(iemOp_jno_Jv)
7728{
7729 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7730 IEMOP_HLP_MIN_386();
7731 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7732 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7733 {
7734 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7736
7737 IEM_MC_BEGIN(0, 0);
7738 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7739 IEM_MC_ADVANCE_RIP_AND_FINISH();
7740 } IEM_MC_ELSE() {
7741 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7742 } IEM_MC_ENDIF();
7743 IEM_MC_END();
7744 }
7745 else
7746 {
7747 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7749
7750 IEM_MC_BEGIN(0, 0);
7751 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7752 IEM_MC_ADVANCE_RIP_AND_FINISH();
7753 } IEM_MC_ELSE() {
7754 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7755 } IEM_MC_ENDIF();
7756 IEM_MC_END();
7757 }
7758}
7759
7760
7761/** Opcode 0x0f 0x82. */
7762FNIEMOP_DEF(iemOp_jc_Jv)
7763{
7764 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7765 IEMOP_HLP_MIN_386();
7766 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7767 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7768 {
7769 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7771
7772 IEM_MC_BEGIN(0, 0);
7773 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7774 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7775 } IEM_MC_ELSE() {
7776 IEM_MC_ADVANCE_RIP_AND_FINISH();
7777 } IEM_MC_ENDIF();
7778 IEM_MC_END();
7779 }
7780 else
7781 {
7782 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7784
7785 IEM_MC_BEGIN(0, 0);
7786 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7787 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7788 } IEM_MC_ELSE() {
7789 IEM_MC_ADVANCE_RIP_AND_FINISH();
7790 } IEM_MC_ENDIF();
7791 IEM_MC_END();
7792 }
7793}
7794
7795
7796/** Opcode 0x0f 0x83. */
7797FNIEMOP_DEF(iemOp_jnc_Jv)
7798{
7799 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7800 IEMOP_HLP_MIN_386();
7801 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7802 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7803 {
7804 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7806
7807 IEM_MC_BEGIN(0, 0);
7808 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7809 IEM_MC_ADVANCE_RIP_AND_FINISH();
7810 } IEM_MC_ELSE() {
7811 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7812 } IEM_MC_ENDIF();
7813 IEM_MC_END();
7814 }
7815 else
7816 {
7817 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7819
7820 IEM_MC_BEGIN(0, 0);
7821 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7822 IEM_MC_ADVANCE_RIP_AND_FINISH();
7823 } IEM_MC_ELSE() {
7824 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7825 } IEM_MC_ENDIF();
7826 IEM_MC_END();
7827 }
7828}
7829
7830
7831/** Opcode 0x0f 0x84. */
7832FNIEMOP_DEF(iemOp_je_Jv)
7833{
7834 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7835 IEMOP_HLP_MIN_386();
7836 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7837 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7838 {
7839 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7841
7842 IEM_MC_BEGIN(0, 0);
7843 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7844 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7845 } IEM_MC_ELSE() {
7846 IEM_MC_ADVANCE_RIP_AND_FINISH();
7847 } IEM_MC_ENDIF();
7848 IEM_MC_END();
7849 }
7850 else
7851 {
7852 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7854
7855 IEM_MC_BEGIN(0, 0);
7856 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7857 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7858 } IEM_MC_ELSE() {
7859 IEM_MC_ADVANCE_RIP_AND_FINISH();
7860 } IEM_MC_ENDIF();
7861 IEM_MC_END();
7862 }
7863}
7864
7865
7866/** Opcode 0x0f 0x85. */
7867FNIEMOP_DEF(iemOp_jne_Jv)
7868{
7869 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7870 IEMOP_HLP_MIN_386();
7871 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7872 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7873 {
7874 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7876
7877 IEM_MC_BEGIN(0, 0);
7878 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7879 IEM_MC_ADVANCE_RIP_AND_FINISH();
7880 } IEM_MC_ELSE() {
7881 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7882 } IEM_MC_ENDIF();
7883 IEM_MC_END();
7884 }
7885 else
7886 {
7887 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7889
7890 IEM_MC_BEGIN(0, 0);
7891 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7892 IEM_MC_ADVANCE_RIP_AND_FINISH();
7893 } IEM_MC_ELSE() {
7894 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7895 } IEM_MC_ENDIF();
7896 IEM_MC_END();
7897 }
7898}
7899
7900
7901/** Opcode 0x0f 0x86. */
7902FNIEMOP_DEF(iemOp_jbe_Jv)
7903{
7904 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7905 IEMOP_HLP_MIN_386();
7906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7907 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7908 {
7909 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7911
7912 IEM_MC_BEGIN(0, 0);
7913 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7914 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7915 } IEM_MC_ELSE() {
7916 IEM_MC_ADVANCE_RIP_AND_FINISH();
7917 } IEM_MC_ENDIF();
7918 IEM_MC_END();
7919 }
7920 else
7921 {
7922 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7924
7925 IEM_MC_BEGIN(0, 0);
7926 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7927 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7928 } IEM_MC_ELSE() {
7929 IEM_MC_ADVANCE_RIP_AND_FINISH();
7930 } IEM_MC_ENDIF();
7931 IEM_MC_END();
7932 }
7933}
7934
7935
7936/** Opcode 0x0f 0x87. */
7937FNIEMOP_DEF(iemOp_jnbe_Jv)
7938{
7939 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7940 IEMOP_HLP_MIN_386();
7941 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7942 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7943 {
7944 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7946
7947 IEM_MC_BEGIN(0, 0);
7948 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7949 IEM_MC_ADVANCE_RIP_AND_FINISH();
7950 } IEM_MC_ELSE() {
7951 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7952 } IEM_MC_ENDIF();
7953 IEM_MC_END();
7954 }
7955 else
7956 {
7957 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7959
7960 IEM_MC_BEGIN(0, 0);
7961 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7962 IEM_MC_ADVANCE_RIP_AND_FINISH();
7963 } IEM_MC_ELSE() {
7964 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7965 } IEM_MC_ENDIF();
7966 IEM_MC_END();
7967 }
7968}
7969
7970
7971/** Opcode 0x0f 0x88. */
7972FNIEMOP_DEF(iemOp_js_Jv)
7973{
7974 IEMOP_MNEMONIC(js_Jv, "js Jv");
7975 IEMOP_HLP_MIN_386();
7976 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7977 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7978 {
7979 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7981
7982 IEM_MC_BEGIN(0, 0);
7983 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7984 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7985 } IEM_MC_ELSE() {
7986 IEM_MC_ADVANCE_RIP_AND_FINISH();
7987 } IEM_MC_ENDIF();
7988 IEM_MC_END();
7989 }
7990 else
7991 {
7992 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7994
7995 IEM_MC_BEGIN(0, 0);
7996 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7997 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7998 } IEM_MC_ELSE() {
7999 IEM_MC_ADVANCE_RIP_AND_FINISH();
8000 } IEM_MC_ENDIF();
8001 IEM_MC_END();
8002 }
8003}
8004
8005
8006/** Opcode 0x0f 0x89. */
8007FNIEMOP_DEF(iemOp_jns_Jv)
8008{
8009 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8010 IEMOP_HLP_MIN_386();
8011 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8012 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8013 {
8014 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8016
8017 IEM_MC_BEGIN(0, 0);
8018 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8019 IEM_MC_ADVANCE_RIP_AND_FINISH();
8020 } IEM_MC_ELSE() {
8021 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8022 } IEM_MC_ENDIF();
8023 IEM_MC_END();
8024 }
8025 else
8026 {
8027 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8029
8030 IEM_MC_BEGIN(0, 0);
8031 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8032 IEM_MC_ADVANCE_RIP_AND_FINISH();
8033 } IEM_MC_ELSE() {
8034 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8035 } IEM_MC_ENDIF();
8036 IEM_MC_END();
8037 }
8038}
8039
8040
8041/** Opcode 0x0f 0x8a. */
8042FNIEMOP_DEF(iemOp_jp_Jv)
8043{
8044 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8045 IEMOP_HLP_MIN_386();
8046 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8047 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8048 {
8049 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8051
8052 IEM_MC_BEGIN(0, 0);
8053 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8054 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8055 } IEM_MC_ELSE() {
8056 IEM_MC_ADVANCE_RIP_AND_FINISH();
8057 } IEM_MC_ENDIF();
8058 IEM_MC_END();
8059 }
8060 else
8061 {
8062 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8064
8065 IEM_MC_BEGIN(0, 0);
8066 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8067 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8068 } IEM_MC_ELSE() {
8069 IEM_MC_ADVANCE_RIP_AND_FINISH();
8070 } IEM_MC_ENDIF();
8071 IEM_MC_END();
8072 }
8073}
8074
8075
8076/** Opcode 0x0f 0x8b. */
8077FNIEMOP_DEF(iemOp_jnp_Jv)
8078{
8079 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8080 IEMOP_HLP_MIN_386();
8081 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8082 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8083 {
8084 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8086
8087 IEM_MC_BEGIN(0, 0);
8088 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8089 IEM_MC_ADVANCE_RIP_AND_FINISH();
8090 } IEM_MC_ELSE() {
8091 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8092 } IEM_MC_ENDIF();
8093 IEM_MC_END();
8094 }
8095 else
8096 {
8097 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8099
8100 IEM_MC_BEGIN(0, 0);
8101 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8102 IEM_MC_ADVANCE_RIP_AND_FINISH();
8103 } IEM_MC_ELSE() {
8104 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8105 } IEM_MC_ENDIF();
8106 IEM_MC_END();
8107 }
8108}
8109
8110
8111/** Opcode 0x0f 0x8c. */
8112FNIEMOP_DEF(iemOp_jl_Jv)
8113{
8114 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8115 IEMOP_HLP_MIN_386();
8116 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8117 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8118 {
8119 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8121
8122 IEM_MC_BEGIN(0, 0);
8123 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8124 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8125 } IEM_MC_ELSE() {
8126 IEM_MC_ADVANCE_RIP_AND_FINISH();
8127 } IEM_MC_ENDIF();
8128 IEM_MC_END();
8129 }
8130 else
8131 {
8132 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8134
8135 IEM_MC_BEGIN(0, 0);
8136 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8137 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8138 } IEM_MC_ELSE() {
8139 IEM_MC_ADVANCE_RIP_AND_FINISH();
8140 } IEM_MC_ENDIF();
8141 IEM_MC_END();
8142 }
8143}
8144
8145
8146/** Opcode 0x0f 0x8d. */
8147FNIEMOP_DEF(iemOp_jnl_Jv)
8148{
8149 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8150 IEMOP_HLP_MIN_386();
8151 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8152 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8153 {
8154 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8156
8157 IEM_MC_BEGIN(0, 0);
8158 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8159 IEM_MC_ADVANCE_RIP_AND_FINISH();
8160 } IEM_MC_ELSE() {
8161 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8162 } IEM_MC_ENDIF();
8163 IEM_MC_END();
8164 }
8165 else
8166 {
8167 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8169
8170 IEM_MC_BEGIN(0, 0);
8171 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8172 IEM_MC_ADVANCE_RIP_AND_FINISH();
8173 } IEM_MC_ELSE() {
8174 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8175 } IEM_MC_ENDIF();
8176 IEM_MC_END();
8177 }
8178}
8179
8180
8181/** Opcode 0x0f 0x8e. */
8182FNIEMOP_DEF(iemOp_jle_Jv)
8183{
8184 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8185 IEMOP_HLP_MIN_386();
8186 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8187 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8188 {
8189 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8191
8192 IEM_MC_BEGIN(0, 0);
8193 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8194 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8195 } IEM_MC_ELSE() {
8196 IEM_MC_ADVANCE_RIP_AND_FINISH();
8197 } IEM_MC_ENDIF();
8198 IEM_MC_END();
8199 }
8200 else
8201 {
8202 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8204
8205 IEM_MC_BEGIN(0, 0);
8206 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8207 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8208 } IEM_MC_ELSE() {
8209 IEM_MC_ADVANCE_RIP_AND_FINISH();
8210 } IEM_MC_ENDIF();
8211 IEM_MC_END();
8212 }
8213}
8214
8215
8216/** Opcode 0x0f 0x8f. */
8217FNIEMOP_DEF(iemOp_jnle_Jv)
8218{
8219 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8220 IEMOP_HLP_MIN_386();
8221 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8222 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8223 {
8224 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8226
8227 IEM_MC_BEGIN(0, 0);
8228 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8229 IEM_MC_ADVANCE_RIP_AND_FINISH();
8230 } IEM_MC_ELSE() {
8231 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8232 } IEM_MC_ENDIF();
8233 IEM_MC_END();
8234 }
8235 else
8236 {
8237 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8239
8240 IEM_MC_BEGIN(0, 0);
8241 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8242 IEM_MC_ADVANCE_RIP_AND_FINISH();
8243 } IEM_MC_ELSE() {
8244 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8245 } IEM_MC_ENDIF();
8246 IEM_MC_END();
8247 }
8248}
8249
8250
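/*
 * The SETcc Eb handlers below (0x0f 0x90..0x9f) evaluate the same conditions
 * as the Jcc handlers above, but instead of branching they store 1 (condition
 * true) or 0 (condition false) to the byte-sized register or memory operand.
 */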
8251/** Opcode 0x0f 0x90. */
8252FNIEMOP_DEF(iemOp_seto_Eb)
8253{
8254 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8255 IEMOP_HLP_MIN_386();
8256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8257
8258 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8259 * any way. AMD says it's "unused", whatever that means. We're
8260 * ignoring for now. */
8261 if (IEM_IS_MODRM_REG_MODE(bRm))
8262 {
8263 /* register target */
8264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8265 IEM_MC_BEGIN(0, 0);
8266 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8267 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8268 } IEM_MC_ELSE() {
8269 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8270 } IEM_MC_ENDIF();
8271 IEM_MC_ADVANCE_RIP_AND_FINISH();
8272 IEM_MC_END();
8273 }
8274 else
8275 {
8276 /* memory target */
8277 IEM_MC_BEGIN(0, 1);
8278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8281 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8282 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8283 } IEM_MC_ELSE() {
8284 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8285 } IEM_MC_ENDIF();
8286 IEM_MC_ADVANCE_RIP_AND_FINISH();
8287 IEM_MC_END();
8288 }
8289}
8290
8291
8292/** Opcode 0x0f 0x91. */
8293FNIEMOP_DEF(iemOp_setno_Eb)
8294{
8295 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8296 IEMOP_HLP_MIN_386();
8297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8298
8299 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8300 * any way. AMD says it's "unused", whatever that means. We're
8301 * ignoring for now. */
8302 if (IEM_IS_MODRM_REG_MODE(bRm))
8303 {
8304 /* register target */
8305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8306 IEM_MC_BEGIN(0, 0);
8307 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8308 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8309 } IEM_MC_ELSE() {
8310 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8311 } IEM_MC_ENDIF();
8312 IEM_MC_ADVANCE_RIP_AND_FINISH();
8313 IEM_MC_END();
8314 }
8315 else
8316 {
8317 /* memory target */
8318 IEM_MC_BEGIN(0, 1);
8319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8322 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8323 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8324 } IEM_MC_ELSE() {
8325 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8326 } IEM_MC_ENDIF();
8327 IEM_MC_ADVANCE_RIP_AND_FINISH();
8328 IEM_MC_END();
8329 }
8330}
8331
8332
8333/** Opcode 0x0f 0x92. */
8334FNIEMOP_DEF(iemOp_setc_Eb)
8335{
8336 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8337 IEMOP_HLP_MIN_386();
8338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8339
8340 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8341 * any way. AMD says it's "unused", whatever that means. We're
8342 * ignoring for now. */
8343 if (IEM_IS_MODRM_REG_MODE(bRm))
8344 {
8345 /* register target */
8346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8347 IEM_MC_BEGIN(0, 0);
8348 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8349 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8350 } IEM_MC_ELSE() {
8351 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8352 } IEM_MC_ENDIF();
8353 IEM_MC_ADVANCE_RIP_AND_FINISH();
8354 IEM_MC_END();
8355 }
8356 else
8357 {
8358 /* memory target */
8359 IEM_MC_BEGIN(0, 1);
8360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8363 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8364 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8365 } IEM_MC_ELSE() {
8366 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8367 } IEM_MC_ENDIF();
8368 IEM_MC_ADVANCE_RIP_AND_FINISH();
8369 IEM_MC_END();
8370 }
8371}
8372
8373
8374/** Opcode 0x0f 0x93. */
8375FNIEMOP_DEF(iemOp_setnc_Eb)
8376{
8377 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8378 IEMOP_HLP_MIN_386();
8379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8380
8381 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8382 * any way. AMD says it's "unused", whatever that means. We're
8383 * ignoring for now. */
8384 if (IEM_IS_MODRM_REG_MODE(bRm))
8385 {
8386 /* register target */
8387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8388 IEM_MC_BEGIN(0, 0);
8389 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8390 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8391 } IEM_MC_ELSE() {
8392 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8393 } IEM_MC_ENDIF();
8394 IEM_MC_ADVANCE_RIP_AND_FINISH();
8395 IEM_MC_END();
8396 }
8397 else
8398 {
8399 /* memory target */
8400 IEM_MC_BEGIN(0, 1);
8401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8404 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8405 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8406 } IEM_MC_ELSE() {
8407 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8408 } IEM_MC_ENDIF();
8409 IEM_MC_ADVANCE_RIP_AND_FINISH();
8410 IEM_MC_END();
8411 }
8412}
8413
8414
8415/** Opcode 0x0f 0x94. */
8416FNIEMOP_DEF(iemOp_sete_Eb)
8417{
8418 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8419 IEMOP_HLP_MIN_386();
8420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8421
8422 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8423 * any way. AMD says it's "unused", whatever that means. We're
8424 * ignoring for now. */
8425 if (IEM_IS_MODRM_REG_MODE(bRm))
8426 {
8427 /* register target */
8428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8429 IEM_MC_BEGIN(0, 0);
8430 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8431 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8432 } IEM_MC_ELSE() {
8433 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8434 } IEM_MC_ENDIF();
8435 IEM_MC_ADVANCE_RIP_AND_FINISH();
8436 IEM_MC_END();
8437 }
8438 else
8439 {
8440 /* memory target */
8441 IEM_MC_BEGIN(0, 1);
8442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8445 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8446 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8447 } IEM_MC_ELSE() {
8448 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8449 } IEM_MC_ENDIF();
8450 IEM_MC_ADVANCE_RIP_AND_FINISH();
8451 IEM_MC_END();
8452 }
8453}
8454
8455
8456/** Opcode 0x0f 0x95. */
8457FNIEMOP_DEF(iemOp_setne_Eb)
8458{
8459 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8460 IEMOP_HLP_MIN_386();
8461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8462
8463 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8464 * any way. AMD says it's "unused", whatever that means. We're
8465 * ignoring for now. */
8466 if (IEM_IS_MODRM_REG_MODE(bRm))
8467 {
8468 /* register target */
8469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8470 IEM_MC_BEGIN(0, 0);
8471 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8472 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8473 } IEM_MC_ELSE() {
8474 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8475 } IEM_MC_ENDIF();
8476 IEM_MC_ADVANCE_RIP_AND_FINISH();
8477 IEM_MC_END();
8478 }
8479 else
8480 {
8481 /* memory target */
8482 IEM_MC_BEGIN(0, 1);
8483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8486 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8487 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8488 } IEM_MC_ELSE() {
8489 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8490 } IEM_MC_ENDIF();
8491 IEM_MC_ADVANCE_RIP_AND_FINISH();
8492 IEM_MC_END();
8493 }
8494}
8495
8496
8497/** Opcode 0x0f 0x96. */
8498FNIEMOP_DEF(iemOp_setbe_Eb)
8499{
8500 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8501 IEMOP_HLP_MIN_386();
8502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8503
8504 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8505 * any way. AMD says it's "unused", whatever that means. We're
8506 * ignoring for now. */
8507 if (IEM_IS_MODRM_REG_MODE(bRm))
8508 {
8509 /* register target */
8510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8511 IEM_MC_BEGIN(0, 0);
8512 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8513 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8514 } IEM_MC_ELSE() {
8515 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8516 } IEM_MC_ENDIF();
8517 IEM_MC_ADVANCE_RIP_AND_FINISH();
8518 IEM_MC_END();
8519 }
8520 else
8521 {
8522 /* memory target */
8523 IEM_MC_BEGIN(0, 1);
8524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8527 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8528 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8529 } IEM_MC_ELSE() {
8530 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8531 } IEM_MC_ENDIF();
8532 IEM_MC_ADVANCE_RIP_AND_FINISH();
8533 IEM_MC_END();
8534 }
8535}
8536
8537
8538/** Opcode 0x0f 0x97. */
8539FNIEMOP_DEF(iemOp_setnbe_Eb)
8540{
8541 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8542 IEMOP_HLP_MIN_386();
8543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8544
8545 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8546 * any way. AMD says it's "unused", whatever that means. We're
8547 * ignoring for now. */
8548 if (IEM_IS_MODRM_REG_MODE(bRm))
8549 {
8550 /* register target */
8551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8552 IEM_MC_BEGIN(0, 0);
8553 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8554 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8555 } IEM_MC_ELSE() {
8556 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8557 } IEM_MC_ENDIF();
8558 IEM_MC_ADVANCE_RIP_AND_FINISH();
8559 IEM_MC_END();
8560 }
8561 else
8562 {
8563 /* memory target */
8564 IEM_MC_BEGIN(0, 1);
8565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8568 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8569 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8570 } IEM_MC_ELSE() {
8571 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8572 } IEM_MC_ENDIF();
8573 IEM_MC_ADVANCE_RIP_AND_FINISH();
8574 IEM_MC_END();
8575 }
8576}
8577
8578
8579/** Opcode 0x0f 0x98. */
8580FNIEMOP_DEF(iemOp_sets_Eb)
8581{
8582 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8583 IEMOP_HLP_MIN_386();
8584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8585
8586 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8587 * any way. AMD says it's "unused", whatever that means. We're
8588 * ignoring for now. */
8589 if (IEM_IS_MODRM_REG_MODE(bRm))
8590 {
8591 /* register target */
8592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8593 IEM_MC_BEGIN(0, 0);
8594 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8595 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8596 } IEM_MC_ELSE() {
8597 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8598 } IEM_MC_ENDIF();
8599 IEM_MC_ADVANCE_RIP_AND_FINISH();
8600 IEM_MC_END();
8601 }
8602 else
8603 {
8604 /* memory target */
8605 IEM_MC_BEGIN(0, 1);
8606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8610 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8611 } IEM_MC_ELSE() {
8612 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8613 } IEM_MC_ENDIF();
8614 IEM_MC_ADVANCE_RIP_AND_FINISH();
8615 IEM_MC_END();
8616 }
8617}
8618
8619
8620/** Opcode 0x0f 0x99. */
8621FNIEMOP_DEF(iemOp_setns_Eb)
8622{
8623 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8624 IEMOP_HLP_MIN_386();
8625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8626
8627 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8628 * any way. AMD says it's "unused", whatever that means. We're
8629 * ignoring for now. */
8630 if (IEM_IS_MODRM_REG_MODE(bRm))
8631 {
8632 /* register target */
8633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8634 IEM_MC_BEGIN(0, 0);
8635 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8636 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8637 } IEM_MC_ELSE() {
8638 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8639 } IEM_MC_ENDIF();
8640 IEM_MC_ADVANCE_RIP_AND_FINISH();
8641 IEM_MC_END();
8642 }
8643 else
8644 {
8645 /* memory target */
8646 IEM_MC_BEGIN(0, 1);
8647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8650 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8651 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8652 } IEM_MC_ELSE() {
8653 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8654 } IEM_MC_ENDIF();
8655 IEM_MC_ADVANCE_RIP_AND_FINISH();
8656 IEM_MC_END();
8657 }
8658}
8659
8660
8661/** Opcode 0x0f 0x9a. */
8662FNIEMOP_DEF(iemOp_setp_Eb)
8663{
8664 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8665 IEMOP_HLP_MIN_386();
8666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8667
8668 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8669 * any way. AMD says it's "unused", whatever that means. We're
8670 * ignoring for now. */
8671 if (IEM_IS_MODRM_REG_MODE(bRm))
8672 {
8673 /* register target */
8674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8675 IEM_MC_BEGIN(0, 0);
8676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8677 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8678 } IEM_MC_ELSE() {
8679 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8680 } IEM_MC_ENDIF();
8681 IEM_MC_ADVANCE_RIP_AND_FINISH();
8682 IEM_MC_END();
8683 }
8684 else
8685 {
8686 /* memory target */
8687 IEM_MC_BEGIN(0, 1);
8688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8691 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8692 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8693 } IEM_MC_ELSE() {
8694 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8695 } IEM_MC_ENDIF();
8696 IEM_MC_ADVANCE_RIP_AND_FINISH();
8697 IEM_MC_END();
8698 }
8699}
8700
8701
8702/** Opcode 0x0f 0x9b. */
8703FNIEMOP_DEF(iemOp_setnp_Eb)
8704{
8705 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8706 IEMOP_HLP_MIN_386();
8707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8708
8709 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8710 * any way. AMD says it's "unused", whatever that means. We're
8711 * ignoring for now. */
8712 if (IEM_IS_MODRM_REG_MODE(bRm))
8713 {
8714 /* register target */
8715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8716 IEM_MC_BEGIN(0, 0);
8717 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8718 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8719 } IEM_MC_ELSE() {
8720 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8721 } IEM_MC_ENDIF();
8722 IEM_MC_ADVANCE_RIP_AND_FINISH();
8723 IEM_MC_END();
8724 }
8725 else
8726 {
8727 /* memory target */
8728 IEM_MC_BEGIN(0, 1);
8729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8732 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8733 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8734 } IEM_MC_ELSE() {
8735 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8736 } IEM_MC_ENDIF();
8737 IEM_MC_ADVANCE_RIP_AND_FINISH();
8738 IEM_MC_END();
8739 }
8740}
8741
8742
8743/** Opcode 0x0f 0x9c. */
8744FNIEMOP_DEF(iemOp_setl_Eb)
8745{
8746 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8747 IEMOP_HLP_MIN_386();
8748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8749
8750 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8751 * any way. AMD says it's "unused", whatever that means. We're
8752 * ignoring for now. */
8753 if (IEM_IS_MODRM_REG_MODE(bRm))
8754 {
8755 /* register target */
8756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8757 IEM_MC_BEGIN(0, 0);
8758 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8759 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8760 } IEM_MC_ELSE() {
8761 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8762 } IEM_MC_ENDIF();
8763 IEM_MC_ADVANCE_RIP_AND_FINISH();
8764 IEM_MC_END();
8765 }
8766 else
8767 {
8768 /* memory target */
8769 IEM_MC_BEGIN(0, 1);
8770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8773 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8774 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8775 } IEM_MC_ELSE() {
8776 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8777 } IEM_MC_ENDIF();
8778 IEM_MC_ADVANCE_RIP_AND_FINISH();
8779 IEM_MC_END();
8780 }
8781}
8782
8783
8784/** Opcode 0x0f 0x9d. */
8785FNIEMOP_DEF(iemOp_setnl_Eb)
8786{
8787 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8788 IEMOP_HLP_MIN_386();
8789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8790
8791 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8792 * any way. AMD says it's "unused", whatever that means. We're
8793 * ignoring for now. */
8794 if (IEM_IS_MODRM_REG_MODE(bRm))
8795 {
8796 /* register target */
8797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8798 IEM_MC_BEGIN(0, 0);
8799 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8800 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8801 } IEM_MC_ELSE() {
8802 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8803 } IEM_MC_ENDIF();
8804 IEM_MC_ADVANCE_RIP_AND_FINISH();
8805 IEM_MC_END();
8806 }
8807 else
8808 {
8809 /* memory target */
8810 IEM_MC_BEGIN(0, 1);
8811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8814 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8815 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8816 } IEM_MC_ELSE() {
8817 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8818 } IEM_MC_ENDIF();
8819 IEM_MC_ADVANCE_RIP_AND_FINISH();
8820 IEM_MC_END();
8821 }
8822}
8823
8824
8825/** Opcode 0x0f 0x9e. */
8826FNIEMOP_DEF(iemOp_setle_Eb)
8827{
8828 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8829 IEMOP_HLP_MIN_386();
8830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8831
8832 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8833 * any way. AMD says it's "unused", whatever that means. We're
8834 * ignoring for now. */
8835 if (IEM_IS_MODRM_REG_MODE(bRm))
8836 {
8837 /* register target */
8838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8839 IEM_MC_BEGIN(0, 0);
8840 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8841 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8842 } IEM_MC_ELSE() {
8843 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8844 } IEM_MC_ENDIF();
8845 IEM_MC_ADVANCE_RIP_AND_FINISH();
8846 IEM_MC_END();
8847 }
8848 else
8849 {
8850 /* memory target */
8851 IEM_MC_BEGIN(0, 1);
8852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8855 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8856 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8857 } IEM_MC_ELSE() {
8858 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8859 } IEM_MC_ENDIF();
8860 IEM_MC_ADVANCE_RIP_AND_FINISH();
8861 IEM_MC_END();
8862 }
8863}
8864
8865
8866/** Opcode 0x0f 0x9f. */
8867FNIEMOP_DEF(iemOp_setnle_Eb)
8868{
8869 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8870 IEMOP_HLP_MIN_386();
8871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8872
8873 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8874 * any way. AMD says it's "unused", whatever that means. We're
8875 * ignoring for now. */
8876 if (IEM_IS_MODRM_REG_MODE(bRm))
8877 {
8878 /* register target */
8879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8880 IEM_MC_BEGIN(0, 0);
8881 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8882 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8883 } IEM_MC_ELSE() {
8884 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8885 } IEM_MC_ENDIF();
8886 IEM_MC_ADVANCE_RIP_AND_FINISH();
8887 IEM_MC_END();
8888 }
8889 else
8890 {
8891 /* memory target */
8892 IEM_MC_BEGIN(0, 1);
8893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8896 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8897 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8898 } IEM_MC_ELSE() {
8899 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8900 } IEM_MC_ENDIF();
8901 IEM_MC_ADVANCE_RIP_AND_FINISH();
8902 IEM_MC_END();
8903 }
8904}
8905
8906
8907/**
8908 * Common 'push segment-register' helper.
8909 */
8910FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8911{
8912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8913 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
8914 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8915
8916 switch (pVCpu->iem.s.enmEffOpSize)
8917 {
8918 case IEMMODE_16BIT:
8919 IEM_MC_BEGIN(0, 1);
8920 IEM_MC_LOCAL(uint16_t, u16Value);
8921 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8922 IEM_MC_PUSH_U16(u16Value);
8923 IEM_MC_ADVANCE_RIP_AND_FINISH();
8924 IEM_MC_END();
8925 break;
8926
8927 case IEMMODE_32BIT:
8928 IEM_MC_BEGIN(0, 1);
8929 IEM_MC_LOCAL(uint32_t, u32Value);
8930 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
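 /* Note: a dedicated SREG push is used here because real CPUs may write
    only the low 16 bits of the 32-bit stack slot for a 'push sreg'
    (behaviour varies between implementations), which IEM_MC_PUSH_U32_SREG
    is presumed to model; IEM_MC_PUSH_U32 would always write all 32 bits. */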
8931 IEM_MC_PUSH_U32_SREG(u32Value);
8932 IEM_MC_ADVANCE_RIP_AND_FINISH();
8933 IEM_MC_END();
8934 break;
8935
8936 case IEMMODE_64BIT:
8937 IEM_MC_BEGIN(0, 1);
8938 IEM_MC_LOCAL(uint64_t, u64Value);
8939 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8940 IEM_MC_PUSH_U64(u64Value);
8941 IEM_MC_ADVANCE_RIP_AND_FINISH();
8942 IEM_MC_END();
8943 break;
8944
8945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8946 }
8947}
8948
8949
8950/** Opcode 0x0f 0xa0. */
8951FNIEMOP_DEF(iemOp_push_fs)
8952{
8953 IEMOP_MNEMONIC(push_fs, "push fs");
8954 IEMOP_HLP_MIN_386();
8955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8956 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8957}
8958
8959
8960/** Opcode 0x0f 0xa1. */
8961FNIEMOP_DEF(iemOp_pop_fs)
8962{
8963 IEMOP_MNEMONIC(pop_fs, "pop fs");
8964 IEMOP_HLP_MIN_386();
8965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8966 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8967}
8968
8969
8970/** Opcode 0x0f 0xa2. */
8971FNIEMOP_DEF(iemOp_cpuid)
8972{
8973 IEMOP_MNEMONIC(cpuid, "cpuid");
8974 IEMOP_HLP_MIN_486(); /* not all 486 models implement CPUID. */
8975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8976 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
8977}
8978
8979
8980/**
8981 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8982 * iemOp_bts_Ev_Gv.
8983 */
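/*
 * For the memory forms the bit offset in the source register is signed and
 * unbounded, so the effective address is first adjusted by the offset's
 * element index before the masked in-element bit offset is applied.  Worked
 * 16-bit example (illustrative values):
 *      u16Src = -18:  i16AddrAdj = -18 >> 4 = -2 words = -4 bytes,
 *      u16Src &= 0x0f = 14, i.e. bit 14 of the word at GCPtrEffDst - 4,
 *      which is bit -18 relative to the original effective address.
 */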
8984#define IEMOP_BODY_BIT_Ev_Gv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
8985 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8986 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8987 \
8988 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8989 { \
8990 /* register destination. */ \
8991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8992 switch (pVCpu->iem.s.enmEffOpSize) \
8993 { \
8994 case IEMMODE_16BIT: \
8995 IEM_MC_BEGIN(3, 0); \
8996 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8997 IEM_MC_ARG(uint16_t, u16Src, 1); \
8998 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8999 \
9000 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9001 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9002 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9003 IEM_MC_REF_EFLAGS(pEFlags); \
9004 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9005 \
9006 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9007 IEM_MC_END(); \
9008 break; \
9009 \
9010 case IEMMODE_32BIT: \
9011 IEM_MC_BEGIN(3, 0); \
9012 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9013 IEM_MC_ARG(uint32_t, u32Src, 1); \
9014 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9015 \
9016 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9017 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9018 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9019 IEM_MC_REF_EFLAGS(pEFlags); \
9020 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9021 \
9022 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
9023 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9024 IEM_MC_END(); \
9025 break; \
9026 \
9027 case IEMMODE_64BIT: \
9028 IEM_MC_BEGIN(3, 0); \
9029 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9030 IEM_MC_ARG(uint64_t, u64Src, 1); \
9031 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9032 \
9033 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9034 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9035 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9036 IEM_MC_REF_EFLAGS(pEFlags); \
9037 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9038 \
9039 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9040 IEM_MC_END(); \
9041 break; \
9042 \
9043 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9044 } \
9045 } \
9046 else \
9047 { \
9048 /* memory destination. */ \
9049 /** @todo test negative bit offsets! */ \
9050 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9051 { \
9052 switch (pVCpu->iem.s.enmEffOpSize) \
9053 { \
9054 case IEMMODE_16BIT: \
9055 IEM_MC_BEGIN(3, 2); \
9056 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9057 IEM_MC_ARG(uint16_t, u16Src, 1); \
9058 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9060 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9061 \
9062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9063 IEMOP_HLP_DONE_DECODING(); \
9064 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
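/* Memory destinations use the full Gv value as a *signed* bit offset: \
   it is split into a word-granular address adjustment (arithmetic \
   shift right by 4, scaled by 2 bytes) and a bit index 0..15 that is \
   left in u16Src. The 32-bit and 64-bit cases below do the same with \
   dword and qword granularity. */ \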
9065 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9066 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9067 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9068 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9069 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9070 IEM_MC_FETCH_EFLAGS(EFlags); \
9071 \
9072 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9073 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9074 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
9075 \
9076 IEM_MC_COMMIT_EFLAGS(EFlags); \
9077 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9078 IEM_MC_END(); \
9079 break; \
9080 \
9081 case IEMMODE_32BIT: \
9082 IEM_MC_BEGIN(3, 2); \
9083 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9084 IEM_MC_ARG(uint32_t, u32Src, 1); \
9085 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9087 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9088 \
9089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9090 IEMOP_HLP_DONE_DECODING(); \
9091 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9092 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9093 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9094 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9095 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9096 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9097 IEM_MC_FETCH_EFLAGS(EFlags); \
9098 \
9099 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9100 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9101 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
9102 \
9103 IEM_MC_COMMIT_EFLAGS(EFlags); \
9104 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9105 IEM_MC_END(); \
9106 break; \
9107 \
9108 case IEMMODE_64BIT: \
9109 IEM_MC_BEGIN(3, 2); \
9110 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9111 IEM_MC_ARG(uint64_t, u64Src, 1); \
9112 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9114 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9115 \
9116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9117 IEMOP_HLP_DONE_DECODING(); \
9118 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9119 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9120 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9121 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9122 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9123 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9124 IEM_MC_FETCH_EFLAGS(EFlags); \
9125 \
9126 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9127 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9128 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
9129 \
9130 IEM_MC_COMMIT_EFLAGS(EFlags); \
9131 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9132 IEM_MC_END(); \
9133 break; \
9134 \
9135 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9136 } \
9137 } \
9138 else \
9139 { \
9140 (void)0
9141
9142#define IEMOP_BODY_BIT_Ev_Gv_NO_LOCK() \
9143 IEMOP_HLP_DONE_DECODING(); \
9144 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
9145 } \
9146 } \
9147 (void)0
9148
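/*
 * Locked tail: mirrors the unlocked memory path above but maps the
 * destination as IEM_ACCESS_DATA_RW and calls the atomic (_locked)
 * workers, then closes the braces left open by IEMOP_BODY_BIT_Ev_Gv.
 */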
9149#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9150 switch (pVCpu->iem.s.enmEffOpSize) \
9151 { \
9152 case IEMMODE_16BIT: \
9153 IEM_MC_BEGIN(3, 2); \
9154 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9155 IEM_MC_ARG(uint16_t, u16Src, 1); \
9156 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9158 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9159 \
9160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9161 IEMOP_HLP_DONE_DECODING(); \
9162 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9163 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9164 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9165 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9166 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9167 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9168 IEM_MC_FETCH_EFLAGS(EFlags); \
9169 \
9170 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9171 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9172 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
9173 \
9174 IEM_MC_COMMIT_EFLAGS(EFlags); \
9175 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9176 IEM_MC_END(); \
9177 break; \
9178 \
9179 case IEMMODE_32BIT: \
9180 IEM_MC_BEGIN(3, 2); \
9181 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9182 IEM_MC_ARG(uint32_t, u32Src, 1); \
9183 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9185 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9186 \
9187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9188 IEMOP_HLP_DONE_DECODING(); \
9189 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9190 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9191 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9192 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9193 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9194 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9195 IEM_MC_FETCH_EFLAGS(EFlags); \
9196 \
9197 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9198 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9199 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
9200 \
9201 IEM_MC_COMMIT_EFLAGS(EFlags); \
9202 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9203 IEM_MC_END(); \
9204 break; \
9205 \
9206 case IEMMODE_64BIT: \
9207 IEM_MC_BEGIN(3, 2); \
9208 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9209 IEM_MC_ARG(uint64_t, u64Src, 1); \
9210 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9212 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9213 \
9214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9215 IEMOP_HLP_DONE_DECODING(); \
9216 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9217 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9218 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9219 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9220 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9221 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9222 IEM_MC_FETCH_EFLAGS(EFlags); \
9223 \
9224 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9225 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9226 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
9227 \
9228 IEM_MC_COMMIT_EFLAGS(EFlags); \
9229 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9230 IEM_MC_END(); \
9231 break; \
9232 \
9233 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9234 } \
9235 } \
9236 } \
9237 (void)0
9238
9239
9240/** Opcode 0x0f 0xa3. */
9241FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9242{
9243 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9244 IEMOP_HLP_MIN_386();
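/* BT only reads the destination (IEM_ACCESS_DATA_R) and writes no
   operand, so a LOCK prefix is invalid and the _NO_LOCK tail is used. */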
9245 IEMOP_BODY_BIT_Ev_Gv(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
9246 IEMOP_BODY_BIT_Ev_Gv_NO_LOCK();
9247}
9248
9249
9250/**
9251 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9252 */
9253FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9254{
9255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9256 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9257
9258 if (IEM_IS_MODRM_REG_MODE(bRm))
9259 {
9260 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9262
9263 switch (pVCpu->iem.s.enmEffOpSize)
9264 {
9265 case IEMMODE_16BIT:
9266 IEM_MC_BEGIN(4, 0);
9267 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9268 IEM_MC_ARG(uint16_t, u16Src, 1);
9269 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9270 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9271
9272 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9273 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9274 IEM_MC_REF_EFLAGS(pEFlags);
9275 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9276
9277 IEM_MC_ADVANCE_RIP_AND_FINISH();
9278 IEM_MC_END();
9279 break;
9280
9281 case IEMMODE_32BIT:
9282 IEM_MC_BEGIN(4, 0);
9283 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9284 IEM_MC_ARG(uint32_t, u32Src, 1);
9285 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9286 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9287
9288 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9289 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9290 IEM_MC_REF_EFLAGS(pEFlags);
9291 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9292
9293 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9294 IEM_MC_ADVANCE_RIP_AND_FINISH();
9295 IEM_MC_END();
9296 break;
9297
9298 case IEMMODE_64BIT:
9299 IEM_MC_BEGIN(4, 0);
9300 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9301 IEM_MC_ARG(uint64_t, u64Src, 1);
9302 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9303 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9304
9305 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9306 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9307 IEM_MC_REF_EFLAGS(pEFlags);
9308 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9309
9310 IEM_MC_ADVANCE_RIP_AND_FINISH();
9311 IEM_MC_END();
9312 break;
9313
9314 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9315 }
9316 }
9317 else
9318 {
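/* The effective address is calculated with cbImm=1 so that RIP-relative
   and displacement decoding account for the immediate byte that is
   fetched right afterwards. */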
9319 switch (pVCpu->iem.s.enmEffOpSize)
9320 {
9321 case IEMMODE_16BIT:
9322 IEM_MC_BEGIN(4, 2);
9323 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9324 IEM_MC_ARG(uint16_t, u16Src, 1);
9325 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9326 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9328
9329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9330 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9331 IEM_MC_ASSIGN(cShiftArg, cShift);
9332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9333 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9334 IEM_MC_FETCH_EFLAGS(EFlags);
9335 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9336 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9337
9338 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9339 IEM_MC_COMMIT_EFLAGS(EFlags);
9340 IEM_MC_ADVANCE_RIP_AND_FINISH();
9341 IEM_MC_END();
9342 break;
9343
9344 case IEMMODE_32BIT:
9345 IEM_MC_BEGIN(4, 2);
9346 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9347 IEM_MC_ARG(uint32_t, u32Src, 1);
9348 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9349 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9351
9352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9353 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9354 IEM_MC_ASSIGN(cShiftArg, cShift);
9355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9356 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9357 IEM_MC_FETCH_EFLAGS(EFlags);
9358 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9359 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9360
9361 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9362 IEM_MC_COMMIT_EFLAGS(EFlags);
9363 IEM_MC_ADVANCE_RIP_AND_FINISH();
9364 IEM_MC_END();
9365 break;
9366
9367 case IEMMODE_64BIT:
9368 IEM_MC_BEGIN(4, 2);
9369 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9370 IEM_MC_ARG(uint64_t, u64Src, 1);
9371 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9372 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9374
9375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9376 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9377 IEM_MC_ASSIGN(cShiftArg, cShift);
9378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9379 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9380 IEM_MC_FETCH_EFLAGS(EFlags);
9381 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9382 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9383
9384 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9385 IEM_MC_COMMIT_EFLAGS(EFlags);
9386 IEM_MC_ADVANCE_RIP_AND_FINISH();
9387 IEM_MC_END();
9388 break;
9389
9390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9391 }
9392 }
9393}
9394
9395
9396/**
9397 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9398 */
9399FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9400{
9401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9402 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9403
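/* The shift count from CL is passed through unmasked; any masking of the
   count is assumed to happen in the worker implementations. */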
9404 if (IEM_IS_MODRM_REG_MODE(bRm))
9405 {
9406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9407
9408 switch (pVCpu->iem.s.enmEffOpSize)
9409 {
9410 case IEMMODE_16BIT:
9411 IEM_MC_BEGIN(4, 0);
9412 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9413 IEM_MC_ARG(uint16_t, u16Src, 1);
9414 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9415 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9416
9417 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9418 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9419 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9420 IEM_MC_REF_EFLAGS(pEFlags);
9421 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9422
9423 IEM_MC_ADVANCE_RIP_AND_FINISH();
9424 IEM_MC_END();
9425 break;
9426
9427 case IEMMODE_32BIT:
9428 IEM_MC_BEGIN(4, 0);
9429 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9430 IEM_MC_ARG(uint32_t, u32Src, 1);
9431 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9432 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9433
9434 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9435 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9436 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9437 IEM_MC_REF_EFLAGS(pEFlags);
9438 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9439
9440 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9441 IEM_MC_ADVANCE_RIP_AND_FINISH();
9442 IEM_MC_END();
9443 break;
9444
9445 case IEMMODE_64BIT:
9446 IEM_MC_BEGIN(4, 0);
9447 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9448 IEM_MC_ARG(uint64_t, u64Src, 1);
9449 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9450 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9451
9452 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9453 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9454 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9455 IEM_MC_REF_EFLAGS(pEFlags);
9456 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9457
9458 IEM_MC_ADVANCE_RIP_AND_FINISH();
9459 IEM_MC_END();
9460 break;
9461
9462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9463 }
9464 }
9465 else
9466 {
9467 switch (pVCpu->iem.s.enmEffOpSize)
9468 {
9469 case IEMMODE_16BIT:
9470 IEM_MC_BEGIN(4, 2);
9471 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9472 IEM_MC_ARG(uint16_t, u16Src, 1);
9473 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9474 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9476
9477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9479 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9480 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9481 IEM_MC_FETCH_EFLAGS(EFlags);
9482 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9483 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9484
9485 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9486 IEM_MC_COMMIT_EFLAGS(EFlags);
9487 IEM_MC_ADVANCE_RIP_AND_FINISH();
9488 IEM_MC_END();
9489 break;
9490
9491 case IEMMODE_32BIT:
9492 IEM_MC_BEGIN(4, 2);
9493 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9494 IEM_MC_ARG(uint32_t, u32Src, 1);
9495 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9496 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9498
9499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9501 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9502 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9503 IEM_MC_FETCH_EFLAGS(EFlags);
9504 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9505 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9506
9507 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9508 IEM_MC_COMMIT_EFLAGS(EFlags);
9509 IEM_MC_ADVANCE_RIP_AND_FINISH();
9510 IEM_MC_END();
9511 break;
9512
9513 case IEMMODE_64BIT:
9514 IEM_MC_BEGIN(4, 2);
9515 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9516 IEM_MC_ARG(uint64_t, u64Src, 1);
9517 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9518 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9520
9521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9523 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9524 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9525 IEM_MC_FETCH_EFLAGS(EFlags);
9526 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9527 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9528
9529 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9530 IEM_MC_COMMIT_EFLAGS(EFlags);
9531 IEM_MC_ADVANCE_RIP_AND_FINISH();
9532 IEM_MC_END();
9533 break;
9534
9535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9536 }
9537 }
9538}
9539
9540
9541
9542/** Opcode 0x0f 0xa4. */
9543FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9544{
9545 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9546 IEMOP_HLP_MIN_386();
9547 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9548}
9549
9550
9551/** Opcode 0x0f 0xa5. */
9552FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9553{
9554 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9555 IEMOP_HLP_MIN_386();
9556 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9557}
9558
9559
9560/** Opcode 0x0f 0xa8. */
9561FNIEMOP_DEF(iemOp_push_gs)
9562{
9563 IEMOP_MNEMONIC(push_gs, "push gs");
9564 IEMOP_HLP_MIN_386();
9565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9566 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9567}
9568
9569
9570/** Opcode 0x0f 0xa9. */
9571FNIEMOP_DEF(iemOp_pop_gs)
9572{
9573 IEMOP_MNEMONIC(pop_gs, "pop gs");
9574 IEMOP_HLP_MIN_386();
9575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9576 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9577}
9578
9579
9580/** Opcode 0x0f 0xaa. */
9581FNIEMOP_DEF(iemOp_rsm)
9582{
9583 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9584 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9586 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9587}
9588
9589
9590
9591/** Opcode 0x0f 0xab. */
9592FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9593{
9594 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9595 IEMOP_HLP_MIN_386();
9596 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
9597 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9598}
9599
9600
9601/** Opcode 0x0f 0xac. */
9602FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9603{
9604 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9605 IEMOP_HLP_MIN_386();
9606 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9607}
9608
9609
9610/** Opcode 0x0f 0xad. */
9611FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9612{
9613 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9614 IEMOP_HLP_MIN_386();
9615 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9616}
9617
9618
9619/** Opcode 0x0f 0xae mem/0. */
9620FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9621{
9622 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9623 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9624 return IEMOP_RAISE_INVALID_OPCODE();
9625
9626 IEM_MC_BEGIN(3, 1);
9627 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9628 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9629 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
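/* The effective operand size is passed along so the C implementation can
   distinguish FXSAVE from the REX.W FXSAVE64 form. */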
9630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9632 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9633 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9634 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9635 IEM_MC_END();
9636}
9637
9638
9639/** Opcode 0x0f 0xae mem/1. */
9640FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9641{
9642 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9643 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9644 return IEMOP_RAISE_INVALID_OPCODE();
9645
9646 IEM_MC_BEGIN(3, 1);
9647 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9648 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9649 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9652 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9653 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9654 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9655 IEM_MC_END();
9656}
9657
9658
9659/**
9660 * @opmaps grp15
9661 * @opcode !11/2
9662 * @oppfx none
9663 * @opcpuid sse
9664 * @opgroup og_sse_mxcsrsm
9665 * @opxcpttype 5
9666 * @optest op1=0 -> mxcsr=0
9667 * @optest op1=0x2083 -> mxcsr=0x2083
9668 * @optest op1=0xfffffffe -> value.xcpt=0xd
9669 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9670 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9671 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9672 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9673 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9674 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9675 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9676 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9677 */
9678FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9679{
9680 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9681 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9682 return IEMOP_RAISE_INVALID_OPCODE();
9683
9684 IEM_MC_BEGIN(2, 0);
9685 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9686 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
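/* LDMXCSR modifies MXCSR, so the SSE state must be actualized for
   modification (contrast STMXCSR below, which only reads it). */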
9689 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9690 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9691 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9692 IEM_MC_END();
9693}
9694
9695
9696/**
9697 * @opmaps grp15
9698 * @opcode !11/3
9699 * @oppfx none
9700 * @opcpuid sse
9701 * @opgroup og_sse_mxcsrsm
9702 * @opxcpttype 5
9703 * @optest mxcsr=0 -> op1=0
9704 * @optest mxcsr=0x2083 -> op1=0x2083
9705 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9706 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9707 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9708 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9709 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9710 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9711 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9712 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9713 */
9714FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9715{
9716 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9717 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9718 return IEMOP_RAISE_INVALID_OPCODE();
9719
9720 IEM_MC_BEGIN(2, 0);
9721 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9722 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9725 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9726 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9727 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9728 IEM_MC_END();
9729}
9730
9731
9732/**
9733 * @opmaps grp15
9734 * @opcode !11/4
9735 * @oppfx none
9736 * @opcpuid xsave
9737 * @opgroup og_system
9738 * @opxcpttype none
9739 */
9740FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9741{
9742 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9743 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9744 return IEMOP_RAISE_INVALID_OPCODE();
9745
9746 IEM_MC_BEGIN(3, 0);
9747 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9748 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9749 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9752 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9753 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9754 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9755 IEM_MC_END();
9756}
9757
9758
9759/**
9760 * @opmaps grp15
9761 * @opcode !11/5
9762 * @oppfx none
9763 * @opcpuid xsave
9764 * @opgroup og_system
9765 * @opxcpttype none
9766 */
9767FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9768{
9769 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9770 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9771 return IEMOP_RAISE_INVALID_OPCODE();
9772
9773 IEM_MC_BEGIN(3, 0);
9774 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9775 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9776 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
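/* XRSTOR loads guest FPU/SSE/AVX state, so actualize for change just like
   FXRSTOR above (XSAVE only reads the state and uses FOR_READ). */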
9779 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9780 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9781 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9782 IEM_MC_END();
9783}
9784
9785/** Opcode 0x0f 0xae mem/6. */
9786FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9787
9788/**
9789 * @opmaps grp15
9790 * @opcode !11/7
9791 * @oppfx none
9792 * @opcpuid clfsh
9793 * @opgroup og_cachectl
9794 * @optest op1=1 ->
9795 */
9796FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9797{
9798 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9799 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9800 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9801
9802 IEM_MC_BEGIN(2, 0);
9803 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9804 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9807 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9808 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9809 IEM_MC_END();
9810}
9811
9812/**
9813 * @opmaps grp15
9814 * @opcode !11/7
9815 * @oppfx 0x66
9816 * @opcpuid clflushopt
9817 * @opgroup og_cachectl
9818 * @optest op1=1 ->
9819 */
9820FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9821{
9822 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9823 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9824 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9825
9826 IEM_MC_BEGIN(2, 0);
9827 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9828 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9831 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9832 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9833 IEM_MC_END();
9834}
9835
9836
9837/** Opcode 0x0f 0xae 11b/5. */
9838FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9839{
9840 RT_NOREF_PV(bRm);
9841 IEMOP_MNEMONIC(lfence, "lfence");
9842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9843 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9844 return IEMOP_RAISE_INVALID_OPCODE();
9845
9846 IEM_MC_BEGIN(0, 0);
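/* On x86 hosts the native instruction is only usable when the host itself
   has SSE2; otherwise fall back to a generic memory fence. The mfence and
   sfence variants below follow the same pattern. */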
9847#ifdef RT_ARCH_ARM64
9848 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9849#else
9850 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9851 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9852 else
9853 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9854#endif
9855 IEM_MC_ADVANCE_RIP_AND_FINISH();
9856 IEM_MC_END();
9857}
9858
9859
9860/** Opcode 0x0f 0xae 11b/6. */
9861FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9862{
9863 RT_NOREF_PV(bRm);
9864 IEMOP_MNEMONIC(mfence, "mfence");
9865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9866 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9867 return IEMOP_RAISE_INVALID_OPCODE();
9868
9869 IEM_MC_BEGIN(0, 0);
9870#ifdef RT_ARCH_ARM64
9871 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9872#else
9873 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9874 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9875 else
9876 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9877#endif
9878 IEM_MC_ADVANCE_RIP_AND_FINISH();
9879 IEM_MC_END();
9880}
9881
9882
9883/** Opcode 0x0f 0xae 11b/7. */
9884FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9885{
9886 RT_NOREF_PV(bRm);
9887 IEMOP_MNEMONIC(sfence, "sfence");
9888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9889 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9890 return IEMOP_RAISE_INVALID_OPCODE();
9891
9892 IEM_MC_BEGIN(0, 0);
9893#ifdef RT_ARCH_ARM64
9894 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9895#else
9896 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9897 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9898 else
9899 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9900#endif
9901 IEM_MC_ADVANCE_RIP_AND_FINISH();
9902 IEM_MC_END();
9903}
9904
9905
9906/** Opcode 0xf3 0x0f 0xae 11b/0. */
9907FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9908{
9909 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9911 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9912 {
9913 IEM_MC_BEGIN(1, 0);
9914 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9915 IEM_MC_ARG(uint64_t, u64Dst, 0);
9916 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9917 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9918 IEM_MC_ADVANCE_RIP_AND_FINISH();
9919 IEM_MC_END();
9920 }
9921 else
9922 {
9923 IEM_MC_BEGIN(1, 0);
9924 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9925 IEM_MC_ARG(uint32_t, u32Dst, 0);
9926 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9927 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9928 IEM_MC_ADVANCE_RIP_AND_FINISH();
9929 IEM_MC_END();
9930 }
9931}
9932
9933
9934/** Opcode 0xf3 0x0f 0xae 11b/1. */
9935FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9936{
9937 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9939 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9940 {
9941 IEM_MC_BEGIN(1, 0);
9942 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9943 IEM_MC_ARG(uint64_t, u64Dst, 0);
9944 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9945 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9946 IEM_MC_ADVANCE_RIP_AND_FINISH();
9947 IEM_MC_END();
9948 }
9949 else
9950 {
9951 IEM_MC_BEGIN(1, 0);
9952 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9953 IEM_MC_ARG(uint32_t, u32Dst, 0);
9954 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9955 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9956 IEM_MC_ADVANCE_RIP_AND_FINISH();
9957 IEM_MC_END();
9958 }
9959}
9960
9961
9962/** Opcode 0xf3 0x0f 0xae 11b/2. */
9963FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9964{
9965 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9967 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9968 {
9969 IEM_MC_BEGIN(1, 0);
9970 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9971 IEM_MC_ARG(uint64_t, u64Dst, 0);
9972 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
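/* A non-canonical base must raise #GP(0); the 32-bit variant below is
   zero-extended and thus always canonical, so it needs no such check. */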
9973 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9974 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9975 IEM_MC_ADVANCE_RIP_AND_FINISH();
9976 IEM_MC_END();
9977 }
9978 else
9979 {
9980 IEM_MC_BEGIN(1, 0);
9981 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9982 IEM_MC_ARG(uint32_t, u32Dst, 0);
9983 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9984 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9985 IEM_MC_ADVANCE_RIP_AND_FINISH();
9986 IEM_MC_END();
9987 }
9988}
9989
9990
9991/** Opcode 0xf3 0x0f 0xae 11b/3. */
9992FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9993{
9994 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9996 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9997 {
9998 IEM_MC_BEGIN(1, 0);
9999 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10000 IEM_MC_ARG(uint64_t, u64Dst, 0);
10001 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10002 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10003 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10004 IEM_MC_ADVANCE_RIP_AND_FINISH();
10005 IEM_MC_END();
10006 }
10007 else
10008 {
10009 IEM_MC_BEGIN(1, 0);
10010 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10011 IEM_MC_ARG(uint32_t, u32Dst, 0);
10012 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10013 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10014 IEM_MC_ADVANCE_RIP_AND_FINISH();
10015 IEM_MC_END();
10016 }
10017}
10018
10019
10020/**
10021 * Group 15 jump table for register variant.
10022 */
10023IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10024{ /* pfx: none, 066h, 0f3h, 0f2h */
10025 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10026 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10027 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10028 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10029 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10030 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10031 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10032 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10033};
10034AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10035
10036
10037/**
10038 * Group 15 jump table for memory variant.
10039 */
10040IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10041{ /* pfx: none, 066h, 0f3h, 0f2h */
10042 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10043 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10044 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10045 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10046 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10047 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10048 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10049 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10050};
10051AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10052
10053
10054/** Opcode 0x0f 0xae. */
10055FNIEMOP_DEF(iemOp_Grp15)
10056{
10057 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
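/* The ModR/M reg field selects the row (4 entries each) and the operand
   prefix recorded in idxPrefix (none/66h/F3h/F2h) the column of the
   tables above. */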
10058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10059 if (IEM_IS_MODRM_REG_MODE(bRm))
10060 /* register, register */
10061 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10062 + pVCpu->iem.s.idxPrefix], bRm);
10063 /* memory, register */
10064 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10065 + pVCpu->iem.s.idxPrefix], bRm);
10066}
10067
10068
10069/** Opcode 0x0f 0xaf. */
10070FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10071{
10072 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10073 IEMOP_HLP_MIN_386();
10074 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10075 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10076 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
10077}
10078
10079
10080/** Opcode 0x0f 0xb0. */
10081FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10082{
10083 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10084 IEMOP_HLP_MIN_486();
10085 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10086
10087 if (IEM_IS_MODRM_REG_MODE(bRm))
10088 {
10089 IEMOP_HLP_DONE_DECODING();
10090 IEM_MC_BEGIN(4, 0);
10091 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10092 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10093 IEM_MC_ARG(uint8_t, u8Src, 2);
10094 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10095
10096 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10097 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10098 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10099 IEM_MC_REF_EFLAGS(pEFlags);
10100 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10101 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10102 else
10103 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10104
10105 IEM_MC_ADVANCE_RIP_AND_FINISH();
10106 IEM_MC_END();
10107 }
10108 else
10109 {
10110 IEM_MC_BEGIN(4, 3);
10111 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10112 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10113 IEM_MC_ARG(uint8_t, u8Src, 2);
10114 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10116 IEM_MC_LOCAL(uint8_t, u8Al);
10117
10118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10119 IEMOP_HLP_DONE_DECODING();
10120 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10121 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10122 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10123 IEM_MC_FETCH_EFLAGS(EFlags);
10124 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10125 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10126 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10127 else
10128 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10129
10130 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10131 IEM_MC_COMMIT_EFLAGS(EFlags);
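/* CMPXCHG loads the destination into AL on a mismatch and leaves it
   untouched on a match, so writing the local back unconditionally is
   correct either way. */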
10132 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10133 IEM_MC_ADVANCE_RIP_AND_FINISH();
10134 IEM_MC_END();
10135 }
10136}
10137
10138/** Opcode 0x0f 0xb1. */
10139FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10140{
10141 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10142 IEMOP_HLP_MIN_486();
10143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10144
10145 if (IEM_IS_MODRM_REG_MODE(bRm))
10146 {
10147 IEMOP_HLP_DONE_DECODING();
10148 switch (pVCpu->iem.s.enmEffOpSize)
10149 {
10150 case IEMMODE_16BIT:
10151 IEM_MC_BEGIN(4, 0);
10152 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10153 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10154 IEM_MC_ARG(uint16_t, u16Src, 2);
10155 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10156
10157 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10158 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10159 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10160 IEM_MC_REF_EFLAGS(pEFlags);
10161 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10162 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10163 else
10164 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10165
10166 IEM_MC_ADVANCE_RIP_AND_FINISH();
10167 IEM_MC_END();
10168 break;
10169
10170 case IEMMODE_32BIT:
10171 IEM_MC_BEGIN(4, 0);
10172 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10173 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10174 IEM_MC_ARG(uint32_t, u32Src, 2);
10175 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10176
10177 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10178 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10179 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10180 IEM_MC_REF_EFLAGS(pEFlags);
10181 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10182 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10183 else
10184 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10185
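/* A 32-bit write clears bits 63:32 of the register actually written:
   the destination on success (ZF=1), EAX on failure. */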
10186 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10187 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10188 } IEM_MC_ELSE() {
10189 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10190 } IEM_MC_ENDIF();
10191
10192 IEM_MC_ADVANCE_RIP_AND_FINISH();
10193 IEM_MC_END();
10194 break;
10195
10196 case IEMMODE_64BIT:
10197 IEM_MC_BEGIN(4, 0);
10198 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10199 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
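/* On 32-bit hosts the source is passed by reference instead of by value,
   presumably to simplify the assembly worker's calling convention for
   64-bit operands. */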
10200#ifdef RT_ARCH_X86
10201 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10202#else
10203 IEM_MC_ARG(uint64_t, u64Src, 2);
10204#endif
10205 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10206
10207 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10208 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10209 IEM_MC_REF_EFLAGS(pEFlags);
10210#ifdef RT_ARCH_X86
10211 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10212 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10213 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10214 else
10215 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10216#else
10217 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10218 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10219 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10220 else
10221 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10222#endif
10223
10224 IEM_MC_ADVANCE_RIP_AND_FINISH();
10225 IEM_MC_END();
10226 break;
10227
10228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10229 }
10230 }
10231 else
10232 {
10233 switch (pVCpu->iem.s.enmEffOpSize)
10234 {
10235 case IEMMODE_16BIT:
10236 IEM_MC_BEGIN(4, 3);
10237 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10238 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10239 IEM_MC_ARG(uint16_t, u16Src, 2);
10240 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10242 IEM_MC_LOCAL(uint16_t, u16Ax);
10243
10244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10245 IEMOP_HLP_DONE_DECODING();
10246 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10247 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10248 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10249 IEM_MC_FETCH_EFLAGS(EFlags);
10250 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10251 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10252 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10253 else
10254 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10255
10256 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10257 IEM_MC_COMMIT_EFLAGS(EFlags);
10258 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10259 IEM_MC_ADVANCE_RIP_AND_FINISH();
10260 IEM_MC_END();
10261 break;
10262
10263 case IEMMODE_32BIT:
10264 IEM_MC_BEGIN(4, 3);
10265 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10266 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10267 IEM_MC_ARG(uint32_t, u32Src, 2);
10268 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10270 IEM_MC_LOCAL(uint32_t, u32Eax);
10271
10272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10273 IEMOP_HLP_DONE_DECODING();
10274 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10275 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10276 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10277 IEM_MC_FETCH_EFLAGS(EFlags);
10278 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10279 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10280 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10281 else
10282 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10283
10284 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10285 IEM_MC_COMMIT_EFLAGS(EFlags);
10286
10287 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10288 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10289 } IEM_MC_ENDIF();
10290
10291 IEM_MC_ADVANCE_RIP_AND_FINISH();
10292 IEM_MC_END();
10293 break;
10294
10295 case IEMMODE_64BIT:
10296 IEM_MC_BEGIN(4, 3);
10297 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10298 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10299#ifdef RT_ARCH_X86
10300 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10301#else
10302 IEM_MC_ARG(uint64_t, u64Src, 2);
10303#endif
10304 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10306 IEM_MC_LOCAL(uint64_t, u64Rax);
10307
10308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10309 IEMOP_HLP_DONE_DECODING();
10310 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10311 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10312 IEM_MC_FETCH_EFLAGS(EFlags);
10313 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10314#ifdef RT_ARCH_X86
10315 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10316 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10317 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10318 else
10319 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10320#else
10321 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10322 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10323 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10324 else
10325 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10326#endif
10327
10328 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10329 IEM_MC_COMMIT_EFLAGS(EFlags);
10330 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10331 IEM_MC_ADVANCE_RIP_AND_FINISH();
10332 IEM_MC_END();
10333 break;
10334
10335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10336 }
10337 }
10338}
10339
10340
10341FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
10342{
10343 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
10344 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
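/* Mp operands are laid out offset first: the 16/32/64-bit offset at +0
   with the 16-bit selector immediately after it. Each case returns via
   the IEM_MC_CALL_CIMPL_5 invocation, hence no break statements. */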
10345
10346 switch (pVCpu->iem.s.enmEffOpSize)
10347 {
10348 case IEMMODE_16BIT:
10349 IEM_MC_BEGIN(5, 1);
10350 IEM_MC_ARG(uint16_t, uSel, 0);
10351 IEM_MC_ARG(uint16_t, offSeg, 1);
10352 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10353 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10354 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10355 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10358 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10359 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
10360 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10361 IEM_MC_END();
10362
10363 case IEMMODE_32BIT:
10364 IEM_MC_BEGIN(5, 1);
10365 IEM_MC_ARG(uint16_t, uSel, 0);
10366 IEM_MC_ARG(uint32_t, offSeg, 1);
10367 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10368 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10369 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10370 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10373 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10374 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
10375 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10376 IEM_MC_END();
10377
10378 case IEMMODE_64BIT:
10379 IEM_MC_BEGIN(5, 1);
10380 IEM_MC_ARG(uint16_t, uSel, 0);
10381 IEM_MC_ARG(uint64_t, offSeg, 1);
10382 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10383 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10384 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10385 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10388 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
10389 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10390 else
10391 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10392 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
10393 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10394 IEM_MC_END();
10395
10396 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10397 }
10398}
10399
10400
10401/** Opcode 0x0f 0xb2. */
10402FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10403{
10404 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10405 IEMOP_HLP_MIN_386();
10406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10407 if (IEM_IS_MODRM_REG_MODE(bRm))
10408 return IEMOP_RAISE_INVALID_OPCODE();
10409 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10410}
10411
10412
10413/** Opcode 0x0f 0xb3. */
10414FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10415{
10416 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10417 IEMOP_HLP_MIN_386();
10418 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
10419 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10420}
10421
10422
10423/** Opcode 0x0f 0xb4. */
10424FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10425{
10426 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10427 IEMOP_HLP_MIN_386();
10428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10429 if (IEM_IS_MODRM_REG_MODE(bRm))
10430 return IEMOP_RAISE_INVALID_OPCODE();
10431 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10432}
10433
10434
10435/** Opcode 0x0f 0xb5. */
10436FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10437{
10438 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10439 IEMOP_HLP_MIN_386();
10440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10441 if (IEM_IS_MODRM_REG_MODE(bRm))
10442 return IEMOP_RAISE_INVALID_OPCODE();
10443 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10444}
10445
10446
10447/** Opcode 0x0f 0xb6. */
10448FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10449{
10450 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10451 IEMOP_HLP_MIN_386();
10452
10453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10454
10455 /*
10456 * If rm is denoting a register, no more instruction bytes.
10457 */
10458 if (IEM_IS_MODRM_REG_MODE(bRm))
10459 {
10460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10461 switch (pVCpu->iem.s.enmEffOpSize)
10462 {
10463 case IEMMODE_16BIT:
10464 IEM_MC_BEGIN(0, 1);
10465 IEM_MC_LOCAL(uint16_t, u16Value);
10466 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10467 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10468 IEM_MC_ADVANCE_RIP_AND_FINISH();
10469 IEM_MC_END();
10470 break;
10471
10472 case IEMMODE_32BIT:
10473 IEM_MC_BEGIN(0, 1);
10474 IEM_MC_LOCAL(uint32_t, u32Value);
10475 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10476 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10477 IEM_MC_ADVANCE_RIP_AND_FINISH();
10478 IEM_MC_END();
10479 break;
10480
10481 case IEMMODE_64BIT:
10482 IEM_MC_BEGIN(0, 1);
10483 IEM_MC_LOCAL(uint64_t, u64Value);
10484 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10485 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10486 IEM_MC_ADVANCE_RIP_AND_FINISH();
10487 IEM_MC_END();
10488 break;
10489
10490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10491 }
10492 }
10493 else
10494 {
10495 /*
10496 * We're loading a register from memory.
10497 */
10498 switch (pVCpu->iem.s.enmEffOpSize)
10499 {
10500 case IEMMODE_16BIT:
10501 IEM_MC_BEGIN(0, 2);
10502 IEM_MC_LOCAL(uint16_t, u16Value);
10503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10506 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10507 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10508 IEM_MC_ADVANCE_RIP_AND_FINISH();
10509 IEM_MC_END();
10510 break;
10511
10512 case IEMMODE_32BIT:
10513 IEM_MC_BEGIN(0, 2);
10514 IEM_MC_LOCAL(uint32_t, u32Value);
10515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10516 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10518 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10519 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10520 IEM_MC_ADVANCE_RIP_AND_FINISH();
10521 IEM_MC_END();
10522 break;
10523
10524 case IEMMODE_64BIT:
10525 IEM_MC_BEGIN(0, 2);
10526 IEM_MC_LOCAL(uint64_t, u64Value);
10527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10530 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10531 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10532 IEM_MC_ADVANCE_RIP_AND_FINISH();
10533 IEM_MC_END();
10534 break;
10535
10536 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10537 }
10538 }
10539}
10540
10541
10542/** Opcode 0x0f 0xb7. */
10543FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10544{
10545 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10546 IEMOP_HLP_MIN_386();
10547
10548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10549
10550 /** @todo Not entirely sure how the operand size prefix is handled here,
10551 * assuming that it will be ignored. Would be nice to have a few
10552 * tests for this. */
10553 /*
10554 * If rm is denoting a register, no more instruction bytes.
10555 */
10556 if (IEM_IS_MODRM_REG_MODE(bRm))
10557 {
10558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10559 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10560 {
10561 IEM_MC_BEGIN(0, 1);
10562 IEM_MC_LOCAL(uint32_t, u32Value);
10563 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10564 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10565 IEM_MC_ADVANCE_RIP_AND_FINISH();
10566 IEM_MC_END();
10567 }
10568 else
10569 {
10570 IEM_MC_BEGIN(0, 1);
10571 IEM_MC_LOCAL(uint64_t, u64Value);
10572 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10573 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10574 IEM_MC_ADVANCE_RIP_AND_FINISH();
10575 IEM_MC_END();
10576 }
10577 }
10578 else
10579 {
10580 /*
10581 * We're loading a register from memory.
10582 */
10583 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10584 {
10585 IEM_MC_BEGIN(0, 2);
10586 IEM_MC_LOCAL(uint32_t, u32Value);
10587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10590 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10591 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10592 IEM_MC_ADVANCE_RIP_AND_FINISH();
10593 IEM_MC_END();
10594 }
10595 else
10596 {
10597 IEM_MC_BEGIN(0, 2);
10598 IEM_MC_LOCAL(uint64_t, u64Value);
10599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10602 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10603 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10604 IEM_MC_ADVANCE_RIP_AND_FINISH();
10605 IEM_MC_END();
10606 }
10607 }
10608}
10609
10610
10611/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10612FNIEMOP_UD_STUB(iemOp_jmpe);
10613
10614
10615/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10616FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10617{
10618 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10619 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10620 return iemOp_InvalidNeedRM(pVCpu);
10621#ifndef TST_IEM_CHECK_MC
10622# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10623 static const IEMOPBINSIZES s_Native =
10624 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10625# endif
10626 static const IEMOPBINSIZES s_Fallback =
10627 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10628#endif
10629 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10630 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
10631}
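
/*
 * For reference, a minimal portable sketch of what the popcnt workers above
 * compute (illustration only, not used by the code; the real workers
 * additionally set ZF for a zero source and clear the remaining status flags):
 */
#if 0
# include <stdint.h>
static unsigned popcnt32Sketch(uint32_t uSrc)
{
    unsigned cBits = 0;
    while (uSrc)
    {
        cBits += uSrc & 1;
        uSrc >>= 1;
    }
    return cBits;
}
#endif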
10632
10633
10634/**
10635 * @opcode 0xb9
10636 * @opinvalid intel-modrm
10637 * @optest ->
10638 */
10639FNIEMOP_DEF(iemOp_Grp10)
10640{
10641 /*
10642 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
10643 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10644 */
10645 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10646 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10647 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10648}
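
/*
 * Illustration only (not part of the emulator; assumes 32-bit addressing and
 * no prefixes): for a byte sequence like 0f b9 04 25 00 00 00 00, i.e.
 * ud1 eax, [0], the number of bytes consumed before raising #UD differs
 * between the two vendors:
 */
#if 0
static unsigned ud1ConsumedBytesSketch(bool fIntelBehaviour)
{
    /* Intel also decodes the modr/m byte (04: rm=100 -> SIB), the SIB byte
       (25: base=101 -> disp32) and the four displacement bytes, while AMD
       stops right after the two opcode bytes. */
    return fIntelBehaviour ? 8 : 2;
}
#endif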
10649
10650
10651/**
10652 * Body for group 8 bit instruction.
10653 */
10654#define IEMOP_BODY_BIT_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
10655 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10656 \
10657 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10658 { \
10659 /* register destination. */ \
10660 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10662 \
10663 switch (pVCpu->iem.s.enmEffOpSize) \
10664 { \
10665 case IEMMODE_16BIT: \
10666 IEM_MC_BEGIN(3, 0); \
10667 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10668 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10669 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10670 \
10671 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10672 IEM_MC_REF_EFLAGS(pEFlags); \
10673 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10674 \
10675 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10676 IEM_MC_END(); \
10677 break; \
10678 \
10679 case IEMMODE_32BIT: \
10680 IEM_MC_BEGIN(3, 0); \
10681 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10682 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10683 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10684 \
10685 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10686 IEM_MC_REF_EFLAGS(pEFlags); \
10687 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10688 \
10689 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
10690 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10691 IEM_MC_END(); \
10692 break; \
10693 \
10694 case IEMMODE_64BIT: \
10695 IEM_MC_BEGIN(3, 0); \
10696 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10697 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10698 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10699 \
10700 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10701 IEM_MC_REF_EFLAGS(pEFlags); \
10702 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10703 \
10704 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10705 IEM_MC_END(); \
10706 break; \
10707 \
10708 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10709 } \
10710 } \
10711 else \
10712 { \
10713 /* memory destination. */ \
10714 /** @todo test negative bit offsets! */ \
10715 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10716 { \
10717 switch (pVCpu->iem.s.enmEffOpSize) \
10718 { \
10719 case IEMMODE_16BIT: \
10720 IEM_MC_BEGIN(3, 1); \
10721 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10722 IEM_MC_ARG(uint16_t, u16Src, 1); \
10723 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10725 \
10726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10727 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10728 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10729 IEMOP_HLP_DONE_DECODING(); \
10730 IEM_MC_FETCH_EFLAGS(EFlags); \
10731 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10732 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10733 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
10734 \
10735 IEM_MC_COMMIT_EFLAGS(EFlags); \
10736 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10737 IEM_MC_END(); \
10738 break; \
10739 \
10740 case IEMMODE_32BIT: \
10741 IEM_MC_BEGIN(3, 1); \
10742 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10743 IEM_MC_ARG(uint32_t, u32Src, 1); \
10744 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10746 \
10747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10748 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10749 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10750 IEMOP_HLP_DONE_DECODING(); \
10751 IEM_MC_FETCH_EFLAGS(EFlags); \
10752 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10753 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10754 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
10755 \
10756 IEM_MC_COMMIT_EFLAGS(EFlags); \
10757 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10758 IEM_MC_END(); \
10759 break; \
10760 \
10761 case IEMMODE_64BIT: \
10762 IEM_MC_BEGIN(3, 1); \
10763 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10764 IEM_MC_ARG(uint64_t, u64Src, 1); \
10765 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10767 \
10768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10769 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10770 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10771 IEMOP_HLP_DONE_DECODING(); \
10772 IEM_MC_FETCH_EFLAGS(EFlags); \
10773 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10774 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10775 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
10776 \
10777 IEM_MC_COMMIT_EFLAGS(EFlags); \
10778 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10779 IEM_MC_END(); \
10780 break; \
10781 \
10782 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10783 } \
10784 } \
10785 else \
10786 { \
10787 (void)0
10788
10789#define IEMOP_BODY_BIT_Ev_Ib_NO_LOCK() \
10790 IEMOP_HLP_DONE_DECODING(); \
10791 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
10792 } \
10793 } \
10794 (void)0
10795
10796#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10797 switch (pVCpu->iem.s.enmEffOpSize) \
10798 { \
10799 case IEMMODE_16BIT: \
10800 IEM_MC_BEGIN(3, 1); \
10801 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10802 IEM_MC_ARG(uint16_t, u16Src, 1); \
10803 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10805 \
10806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10807 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10808 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10809 IEMOP_HLP_DONE_DECODING(); \
10810 IEM_MC_FETCH_EFLAGS(EFlags); \
10811 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10812 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10813 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
10814 \
10815 IEM_MC_COMMIT_EFLAGS(EFlags); \
10816 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10817 IEM_MC_END(); \
10818 break; \
10819 \
10820 case IEMMODE_32BIT: \
10821 IEM_MC_BEGIN(3, 1); \
10822 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10823 IEM_MC_ARG(uint32_t, u32Src, 1); \
10824 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10826 \
10827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10828 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10829 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10830 IEMOP_HLP_DONE_DECODING(); \
10831 IEM_MC_FETCH_EFLAGS(EFlags); \
10832 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10833 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10834 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
10835 \
10836 IEM_MC_COMMIT_EFLAGS(EFlags); \
10837 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10838 IEM_MC_END(); \
10839 break; \
10840 \
10841 case IEMMODE_64BIT: \
10842 IEM_MC_BEGIN(3, 1); \
10843 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10844 IEM_MC_ARG(uint64_t, u64Src, 1); \
10845 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10847 \
10848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10849 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10850 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10851 IEMOP_HLP_DONE_DECODING(); \
10852 IEM_MC_FETCH_EFLAGS(EFlags); \
10853 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10854 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10855 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
10856 \
10857 IEM_MC_COMMIT_EFLAGS(EFlags); \
10858 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10859 IEM_MC_END(); \
10860 break; \
10861 \
10862 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10863 } \
10864 } \
10865 } \
10866 (void)0
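
/*
 * Note that the Ev,Ib forms above always reduce the immediate bit offset
 * modulo the operand size (bImm & 0x0f/0x1f/0x3f), so unlike the Ev,Gv forms
 * the offset can never reach outside the addressed operand.  A minimal
 * standalone sketch of the resulting 32-bit BT semantics (illustration only,
 * not used by the macros above):
 */
#if 0
# include <stdint.h>
/* Returns the CF result of 'bt r/m32, imm8'. */
static unsigned btIb32Sketch(uint32_t uDst, uint8_t bImm)
{
    return (uDst >> (bImm & 0x1f)) & 1;
}
#endif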
10867
10868
10869/** Opcode 0x0f 0xba /4. */
10870FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
10871{
10872 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
10873 IEMOP_BODY_BIT_Ev_Ib(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
10874 IEMOP_BODY_BIT_Ev_Ib_NO_LOCK();
10875}
10876
10877
10878/** Opcode 0x0f 0xba /5. */
10879FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
10880{
10881 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
10882 IEMOP_BODY_BIT_Ev_Ib( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
10883 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
10884}
10885
10886
10887/** Opcode 0x0f 0xba /6. */
10888FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
10889{
10890 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
10891 IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
10892 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10893}
10894
10895
10896/** Opcode 0x0f 0xba /7. */
10897FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
10898{
10899 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
10900 IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
10901 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
10902}
10903
10904
10905/** Opcode 0x0f 0xba. */
10906FNIEMOP_DEF(iemOp_Grp8)
10907{
10908 IEMOP_HLP_MIN_386();
10909 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10910 switch (IEM_GET_MODRM_REG_8(bRm))
10911 {
10912 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
10913 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
10914 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
10915 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
10916
10917 case 0: case 1: case 2: case 3:
10918 /* Both AMD and Intel want full modr/m decoding and imm8. */
10919 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10920
10921 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10922 }
10923}
10924
10925
10926/** Opcode 0x0f 0xbb. */
10927FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10928{
10929 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10930 IEMOP_HLP_MIN_386();
10931 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
10932 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
10933}
10934
10935
10936/**
10937 * Common worker for BSF and BSR instructions.
10938 *
10939 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10940 * the destination register: when the source is zero (ZF set) the destination is
10941 * left untouched, so the high bits must be left alone for 32-bit operations.
10942 *
10943 * @param pImpl Pointer to the instruction implementation (assembly).
10944 */
10945FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10946{
10947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10948
10949 /*
10950 * If rm is denoting a register, no more instruction bytes.
10951 */
10952 if (IEM_IS_MODRM_REG_MODE(bRm))
10953 {
10954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10955 switch (pVCpu->iem.s.enmEffOpSize)
10956 {
10957 case IEMMODE_16BIT:
10958 IEM_MC_BEGIN(3, 0);
10959 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10960 IEM_MC_ARG(uint16_t, u16Src, 1);
10961 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10962
10963 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10964 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10965 IEM_MC_REF_EFLAGS(pEFlags);
10966 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10967
10968 IEM_MC_ADVANCE_RIP_AND_FINISH();
10969 IEM_MC_END();
10970 break;
10971
10972 case IEMMODE_32BIT:
10973 IEM_MC_BEGIN(3, 0);
10974 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10975 IEM_MC_ARG(uint32_t, u32Src, 1);
10976 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10977
10978 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10979 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10980 IEM_MC_REF_EFLAGS(pEFlags);
10981 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10982 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10983 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10984 } IEM_MC_ENDIF();
10985 IEM_MC_ADVANCE_RIP_AND_FINISH();
10986 IEM_MC_END();
10987 break;
10988
10989 case IEMMODE_64BIT:
10990 IEM_MC_BEGIN(3, 0);
10991 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10992 IEM_MC_ARG(uint64_t, u64Src, 1);
10993 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10994
10995 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10996 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10997 IEM_MC_REF_EFLAGS(pEFlags);
10998 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10999
11000 IEM_MC_ADVANCE_RIP_AND_FINISH();
11001 IEM_MC_END();
11002 break;
11003
11004 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11005 }
11006 }
11007 else
11008 {
11009 /*
11010 * We're accessing memory.
11011 */
11012 switch (pVCpu->iem.s.enmEffOpSize)
11013 {
11014 case IEMMODE_16BIT:
11015 IEM_MC_BEGIN(3, 1);
11016 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11017 IEM_MC_ARG(uint16_t, u16Src, 1);
11018 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11020
11021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11023 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11024 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11025 IEM_MC_REF_EFLAGS(pEFlags);
11026 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11027
11028 IEM_MC_ADVANCE_RIP_AND_FINISH();
11029 IEM_MC_END();
11030 break;
11031
11032 case IEMMODE_32BIT:
11033 IEM_MC_BEGIN(3, 1);
11034 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11035 IEM_MC_ARG(uint32_t, u32Src, 1);
11036 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11038
11039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11041 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11042 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11043 IEM_MC_REF_EFLAGS(pEFlags);
11044 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11045
11046 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11047 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11048 } IEM_MC_ENDIF();
11049 IEM_MC_ADVANCE_RIP_AND_FINISH();
11050 IEM_MC_END();
11051 break;
11052
11053 case IEMMODE_64BIT:
11054 IEM_MC_BEGIN(3, 1);
11055 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11056 IEM_MC_ARG(uint64_t, u64Src, 1);
11057 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11059
11060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11062 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11063 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11064 IEM_MC_REF_EFLAGS(pEFlags);
11065 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11066
11067 IEM_MC_ADVANCE_RIP_AND_FINISH();
11068 IEM_MC_END();
11069 break;
11070
11071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11072 }
11073 }
11074}
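
/*
 * A rough standalone sketch of the 32-bit BSF semantics handled above
 * (illustration only): with a zero source ZF is set and the destination is
 * not written at all, which is why the high dword of the destination is only
 * cleared when ZF ends up clear.
 */
#if 0
# include <stdint.h>
static void bsf32Sketch(uint32_t uSrc, uint64_t *puDst, unsigned *pfZF)
{
    if (!uSrc)
        *pfZF = 1;              /* destination left untouched */
    else
    {
        unsigned iBit = 0;
        while (!(uSrc & 1))
        {
            uSrc >>= 1;
            iBit++;
        }
        *puDst = iBit;          /* the 32-bit write zero-extends to 64 bits */
        *pfZF  = 0;
    }
}
#endif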
11075
11076
11077/** Opcode 0x0f 0xbc. */
11078FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11079{
11080 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11081 IEMOP_HLP_MIN_386();
11082 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11083 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11084}
11085
11086
11087/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
11088FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11089{
11090 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11091 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11092 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11093
11094#ifndef TST_IEM_CHECK_MC
11095 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11096 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11097 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11098 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11099 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11100 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11101 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11102 {
11103 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11104 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11105 };
11106#endif
11107 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11108 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11109 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11110 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
11111}
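
/*
 * Unlike BSF, TZCNT is well defined for a zero source: it returns the operand
 * width and sets CF (LZCNT below is the analogous count starting from the
 * most significant bit).  Minimal sketch, illustration only, flags beyond
 * CF/ZF omitted:
 */
#if 0
# include <stdint.h>
static unsigned tzcnt32Sketch(uint32_t uSrc)
{
    if (!uSrc)
        return 32;              /* CF=1 */
    unsigned cZeros = 0;
    while (!(uSrc & 1))
    {
        uSrc >>= 1;
        cZeros++;
    }
    return cZeros;              /* ZF=1 iff the result is zero */
}
#endif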
11112
11113
11114/** Opcode 0x0f 0xbd. */
11115FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11116{
11117 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11118 IEMOP_HLP_MIN_386();
11119 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11120 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11121}
11122
11123
11124/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11125FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11126{
11127 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11128 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11129 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11130
11131#ifndef TST_IEM_CHECK_MC
11132 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11133 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11134 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11135 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11136 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11137 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11138 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11139 {
11140 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11141 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11142 };
11143#endif
11144 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11145 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11146 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11147 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
11148}
11149
11150
11151
11152/** Opcode 0x0f 0xbe. */
11153FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11154{
11155 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11156 IEMOP_HLP_MIN_386();
11157
11158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11159
11160 /*
11161 * If rm is denoting a register, no more instruction bytes.
11162 */
11163 if (IEM_IS_MODRM_REG_MODE(bRm))
11164 {
11165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11166 switch (pVCpu->iem.s.enmEffOpSize)
11167 {
11168 case IEMMODE_16BIT:
11169 IEM_MC_BEGIN(0, 1);
11170 IEM_MC_LOCAL(uint16_t, u16Value);
11171 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11172 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11173 IEM_MC_ADVANCE_RIP_AND_FINISH();
11174 IEM_MC_END();
11175 break;
11176
11177 case IEMMODE_32BIT:
11178 IEM_MC_BEGIN(0, 1);
11179 IEM_MC_LOCAL(uint32_t, u32Value);
11180 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11181 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11182 IEM_MC_ADVANCE_RIP_AND_FINISH();
11183 IEM_MC_END();
11184 break;
11185
11186 case IEMMODE_64BIT:
11187 IEM_MC_BEGIN(0, 1);
11188 IEM_MC_LOCAL(uint64_t, u64Value);
11189 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11190 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11191 IEM_MC_ADVANCE_RIP_AND_FINISH();
11192 IEM_MC_END();
11193 break;
11194
11195 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11196 }
11197 }
11198 else
11199 {
11200 /*
11201 * We're loading a register from memory.
11202 */
11203 switch (pVCpu->iem.s.enmEffOpSize)
11204 {
11205 case IEMMODE_16BIT:
11206 IEM_MC_BEGIN(0, 2);
11207 IEM_MC_LOCAL(uint16_t, u16Value);
11208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11211 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11212 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11213 IEM_MC_ADVANCE_RIP_AND_FINISH();
11214 IEM_MC_END();
11215 break;
11216
11217 case IEMMODE_32BIT:
11218 IEM_MC_BEGIN(0, 2);
11219 IEM_MC_LOCAL(uint32_t, u32Value);
11220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11223 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11224 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11225 IEM_MC_ADVANCE_RIP_AND_FINISH();
11226 IEM_MC_END();
11227 break;
11228
11229 case IEMMODE_64BIT:
11230 IEM_MC_BEGIN(0, 2);
11231 IEM_MC_LOCAL(uint64_t, u64Value);
11232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11235 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11236 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11237 IEM_MC_ADVANCE_RIP_AND_FINISH();
11238 IEM_MC_END();
11239 break;
11240
11241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11242 }
11243 }
11244}
11245
11246
11247/** Opcode 0x0f 0xbf. */
11248FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11249{
11250 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11251 IEMOP_HLP_MIN_386();
11252
11253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11254
11255 /** @todo Not entirely sure how the operand size prefix is handled here,
11256 * assuming that it will be ignored. Would be nice to have a few
11257 * tests for this. */
11258 /*
11259 * If rm is denoting a register, no more instruction bytes.
11260 */
11261 if (IEM_IS_MODRM_REG_MODE(bRm))
11262 {
11263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11264 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11265 {
11266 IEM_MC_BEGIN(0, 1);
11267 IEM_MC_LOCAL(uint32_t, u32Value);
11268 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11269 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11270 IEM_MC_ADVANCE_RIP_AND_FINISH();
11271 IEM_MC_END();
11272 }
11273 else
11274 {
11275 IEM_MC_BEGIN(0, 1);
11276 IEM_MC_LOCAL(uint64_t, u64Value);
11277 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11278 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11279 IEM_MC_ADVANCE_RIP_AND_FINISH();
11280 IEM_MC_END();
11281 }
11282 }
11283 else
11284 {
11285 /*
11286 * We're loading a register from memory.
11287 */
11288 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11289 {
11290 IEM_MC_BEGIN(0, 2);
11291 IEM_MC_LOCAL(uint32_t, u32Value);
11292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11295 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11296 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11297 IEM_MC_ADVANCE_RIP_AND_FINISH();
11298 IEM_MC_END();
11299 }
11300 else
11301 {
11302 IEM_MC_BEGIN(0, 2);
11303 IEM_MC_LOCAL(uint64_t, u64Value);
11304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11307 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11308 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11309 IEM_MC_ADVANCE_RIP_AND_FINISH();
11310 IEM_MC_END();
11311 }
11312 }
11313}
11314
11315
11316/** Opcode 0x0f 0xc0. */
11317FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11318{
11319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11320 IEMOP_HLP_MIN_486();
11321 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11322
11323 /*
11324 * If rm is denoting a register, no more instruction bytes.
11325 */
11326 if (IEM_IS_MODRM_REG_MODE(bRm))
11327 {
11328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11329
11330 IEM_MC_BEGIN(3, 0);
11331 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11332 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11333 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11334
11335 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11336 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11337 IEM_MC_REF_EFLAGS(pEFlags);
11338 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11339
11340 IEM_MC_ADVANCE_RIP_AND_FINISH();
11341 IEM_MC_END();
11342 }
11343 else
11344 {
11345 /*
11346 * We're accessing memory.
11347 */
11348 IEM_MC_BEGIN(3, 3);
11349 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11350 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11351 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11352 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11354
11355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11356 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11357 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11358 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11359 IEM_MC_FETCH_EFLAGS(EFlags);
11360 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11361 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11362 else
11363 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11364
11365 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
11366 IEM_MC_COMMIT_EFLAGS(EFlags);
11367 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11368 IEM_MC_ADVANCE_RIP_AND_FINISH();
11369 IEM_MC_END();
11370 }
11371}
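
/*
 * XADD exchanges and adds: the register operand receives the old destination
 * value while the destination receives the sum, which is why the code above
 * works on a copy of the register (the two operands may alias).  Minimal
 * sketch, illustration only, EFLAGS updating omitted:
 */
#if 0
# include <stdint.h>
static void xadd8Sketch(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uOldDst = *puDst;
    *puDst = uOldDst + *puReg;      /* destination = sum */
    *puReg = uOldDst;               /* register = old destination */
}
#endif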
11372
11373
11374/** Opcode 0x0f 0xc1. */
11375FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11376{
11377 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11378 IEMOP_HLP_MIN_486();
11379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11380
11381 /*
11382 * If rm is denoting a register, no more instruction bytes.
11383 */
11384 if (IEM_IS_MODRM_REG_MODE(bRm))
11385 {
11386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11387
11388 switch (pVCpu->iem.s.enmEffOpSize)
11389 {
11390 case IEMMODE_16BIT:
11391 IEM_MC_BEGIN(3, 0);
11392 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11393 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11394 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11395
11396 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11397 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11398 IEM_MC_REF_EFLAGS(pEFlags);
11399 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11400
11401 IEM_MC_ADVANCE_RIP_AND_FINISH();
11402 IEM_MC_END();
11403 break;
11404
11405 case IEMMODE_32BIT:
11406 IEM_MC_BEGIN(3, 0);
11407 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11408 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11409 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11410
11411 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11412 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11413 IEM_MC_REF_EFLAGS(pEFlags);
11414 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11415
11416 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11417 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11418 IEM_MC_ADVANCE_RIP_AND_FINISH();
11419 IEM_MC_END();
11420 break;
11421
11422 case IEMMODE_64BIT:
11423 IEM_MC_BEGIN(3, 0);
11424 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11425 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11426 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11427
11428 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11429 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11430 IEM_MC_REF_EFLAGS(pEFlags);
11431 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11432
11433 IEM_MC_ADVANCE_RIP_AND_FINISH();
11434 IEM_MC_END();
11435 break;
11436
11437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11438 }
11439 }
11440 else
11441 {
11442 /*
11443 * We're accessing memory.
11444 */
11445 switch (pVCpu->iem.s.enmEffOpSize)
11446 {
11447 case IEMMODE_16BIT:
11448 IEM_MC_BEGIN(3, 3);
11449 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11450 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11451 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11452 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11454
11455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11456 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11457 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11458 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11459 IEM_MC_FETCH_EFLAGS(EFlags);
11460 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11461 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11462 else
11463 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11464
11465 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
11466 IEM_MC_COMMIT_EFLAGS(EFlags);
11467 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11468 IEM_MC_ADVANCE_RIP_AND_FINISH();
11469 IEM_MC_END();
11470 break;
11471
11472 case IEMMODE_32BIT:
11473 IEM_MC_BEGIN(3, 3);
11474 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11475 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11476 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11477 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11479
11480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11481 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11482 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11483 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11484 IEM_MC_FETCH_EFLAGS(EFlags);
11485 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11486 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11487 else
11488 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11489
11490 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
11491 IEM_MC_COMMIT_EFLAGS(EFlags);
11492 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11493 IEM_MC_ADVANCE_RIP_AND_FINISH();
11494 IEM_MC_END();
11495 break;
11496
11497 case IEMMODE_64BIT:
11498 IEM_MC_BEGIN(3, 3);
11499 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11500 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11501 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11502 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11504
11505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11506 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11507 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11508 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11509 IEM_MC_FETCH_EFLAGS(EFlags);
11510 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11511 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11512 else
11513 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11514
11515 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
11516 IEM_MC_COMMIT_EFLAGS(EFlags);
11517 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11518 IEM_MC_ADVANCE_RIP_AND_FINISH();
11519 IEM_MC_END();
11520 break;
11521
11522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11523 }
11524 }
11525}
11526
11527
11528/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11529FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11530{
11531 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11532
11533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11534 if (IEM_IS_MODRM_REG_MODE(bRm))
11535 {
11536 /*
11537 * XMM, XMM.
11538 */
11539 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11541 IEM_MC_BEGIN(4, 2);
11542 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11543 IEM_MC_LOCAL(X86XMMREG, Dst);
11544 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11545 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11546 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11547 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11548 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11549 IEM_MC_PREPARE_SSE_USAGE();
11550 IEM_MC_REF_MXCSR(pfMxcsr);
11551 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11552 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11553 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11554 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11555 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11556 } IEM_MC_ELSE() {
11557 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11558 } IEM_MC_ENDIF();
11559
11560 IEM_MC_ADVANCE_RIP_AND_FINISH();
11561 IEM_MC_END();
11562 }
11563 else
11564 {
11565 /*
11566 * XMM, [mem128].
11567 */
11568 IEM_MC_BEGIN(4, 3);
11569 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11570 IEM_MC_LOCAL(X86XMMREG, Dst);
11571 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11572 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11573 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11575
11576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11577 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11578 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11580 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11581 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11582
11583 IEM_MC_PREPARE_SSE_USAGE();
11584 IEM_MC_REF_MXCSR(pfMxcsr);
11585 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11586 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11587 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11588 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11589 } IEM_MC_ELSE() {
11590 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11591 } IEM_MC_ENDIF();
11592
11593 IEM_MC_ADVANCE_RIP_AND_FINISH();
11594 IEM_MC_END();
11595 }
11596}
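
/*
 * The imm8 of the cmpps/cmppd/cmpss/cmpsd family selects one of eight compare
 * predicates; each result element becomes all ones when the predicate holds
 * and all zeroes otherwise.  A minimal single-element sketch (illustration
 * only; MXCSR and exception handling omitted):
 */
#if 0
# include <stdint.h>
static uint32_t cmpssSketch(float r32Src1, float r32Src2, uint8_t bImm)
{
    bool fRes;
    switch (bImm & 7)
    {
        case 0:  fRes =   r32Src1 == r32Src2;  break;                    /* EQ    */
        case 1:  fRes =   r32Src1 <  r32Src2;  break;                    /* LT    */
        case 2:  fRes =   r32Src1 <= r32Src2;  break;                    /* LE    */
        case 3:  fRes = r32Src1 != r32Src1 || r32Src2 != r32Src2; break; /* UNORD */
        case 4:  fRes = !(r32Src1 == r32Src2); break;                    /* NEQ   */
        case 5:  fRes = !(r32Src1 <  r32Src2); break;                    /* NLT   */
        case 6:  fRes = !(r32Src1 <= r32Src2); break;                    /* NLE   */
        default: fRes = r32Src1 == r32Src1 && r32Src2 == r32Src2; break; /* ORD   */
    }
    return fRes ? UINT32_MAX : 0;
}
#endif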
11597
11598
11599/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11600FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11601{
11602 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11603
11604 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11605 if (IEM_IS_MODRM_REG_MODE(bRm))
11606 {
11607 /*
11608 * XMM, XMM.
11609 */
11610 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11612 IEM_MC_BEGIN(4, 2);
11613 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11614 IEM_MC_LOCAL(X86XMMREG, Dst);
11615 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11616 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11617 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11618 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11619 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11620 IEM_MC_PREPARE_SSE_USAGE();
11621 IEM_MC_REF_MXCSR(pfMxcsr);
11622 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11623 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11624 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11625 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11626 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11627 } IEM_MC_ELSE() {
11628 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11629 } IEM_MC_ENDIF();
11630
11631 IEM_MC_ADVANCE_RIP_AND_FINISH();
11632 IEM_MC_END();
11633 }
11634 else
11635 {
11636 /*
11637 * XMM, [mem128].
11638 */
11639 IEM_MC_BEGIN(4, 3);
11640 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11641 IEM_MC_LOCAL(X86XMMREG, Dst);
11642 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11643 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11644 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11646
11647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11648 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11649 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11651 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11652 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11653
11654 IEM_MC_PREPARE_SSE_USAGE();
11655 IEM_MC_REF_MXCSR(pfMxcsr);
11656 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11657 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11658 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11659 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11660 } IEM_MC_ELSE() {
11661 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11662 } IEM_MC_ENDIF();
11663
11664 IEM_MC_ADVANCE_RIP_AND_FINISH();
11665 IEM_MC_END();
11666 }
11667}
11668
11669
11670/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11671FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11672{
11673 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11674
11675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11676 if (IEM_IS_MODRM_REG_MODE(bRm))
11677 {
11678 /*
11679 * XMM32, XMM32.
11680 */
11681 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11683 IEM_MC_BEGIN(4, 2);
11684 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11685 IEM_MC_LOCAL(X86XMMREG, Dst);
11686 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11687 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11688 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11689 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11690 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11691 IEM_MC_PREPARE_SSE_USAGE();
11692 IEM_MC_REF_MXCSR(pfMxcsr);
11693 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11694 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11695 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11696 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11697 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11698 } IEM_MC_ELSE() {
11699 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11700 } IEM_MC_ENDIF();
11701
11702 IEM_MC_ADVANCE_RIP_AND_FINISH();
11703 IEM_MC_END();
11704 }
11705 else
11706 {
11707 /*
11708 * XMM32, [mem32].
11709 */
11710 IEM_MC_BEGIN(4, 3);
11711 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11712 IEM_MC_LOCAL(X86XMMREG, Dst);
11713 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11714 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11715 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11717
11718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11719 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11720 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11722 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11723 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11724
11725 IEM_MC_PREPARE_SSE_USAGE();
11726 IEM_MC_REF_MXCSR(pfMxcsr);
11727 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11728 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11729 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11730 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11731 } IEM_MC_ELSE() {
11732 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11733 } IEM_MC_ENDIF();
11734
11735 IEM_MC_ADVANCE_RIP_AND_FINISH();
11736 IEM_MC_END();
11737 }
11738}
11739
11740
11741/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11742FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11743{
11744 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11745
11746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11747 if (IEM_IS_MODRM_REG_MODE(bRm))
11748 {
11749 /*
11750 * XMM64, XMM64.
11751 */
11752 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11754 IEM_MC_BEGIN(4, 2);
11755 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11756 IEM_MC_LOCAL(X86XMMREG, Dst);
11757 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11758 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11759 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11760 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11761 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11762 IEM_MC_PREPARE_SSE_USAGE();
11763 IEM_MC_REF_MXCSR(pfMxcsr);
11764 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11765 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11766 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11767 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11768 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11769 } IEM_MC_ELSE() {
11770 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11771 } IEM_MC_ENDIF();
11772
11773 IEM_MC_ADVANCE_RIP_AND_FINISH();
11774 IEM_MC_END();
11775 }
11776 else
11777 {
11778 /*
11779 * XMM64, [mem64].
11780 */
11781 IEM_MC_BEGIN(4, 3);
11782 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11783 IEM_MC_LOCAL(X86XMMREG, Dst);
11784 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11785 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11786 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11788
11789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11790 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11791 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11793 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11794 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11795
11796 IEM_MC_PREPARE_SSE_USAGE();
11797 IEM_MC_REF_MXCSR(pfMxcsr);
11798 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11799 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11800 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11801 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11802 } IEM_MC_ELSE() {
11803 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11804 } IEM_MC_ENDIF();
11805
11806 IEM_MC_ADVANCE_RIP_AND_FINISH();
11807 IEM_MC_END();
11808 }
11809}
11810
11811
11812/** Opcode 0x0f 0xc3. */
11813FNIEMOP_DEF(iemOp_movnti_My_Gy)
11814{
11815 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11816
11817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11818
11819 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11820 if (IEM_IS_MODRM_MEM_MODE(bRm))
11821 {
11822 switch (pVCpu->iem.s.enmEffOpSize)
11823 {
11824 case IEMMODE_32BIT:
11825 IEM_MC_BEGIN(0, 2);
11826 IEM_MC_LOCAL(uint32_t, u32Value);
11827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11828
11829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11831 IEMOP_HLP_RAISE_UD_IF_MISSING_GUEST_FEATURE(pVCpu, fSse2);
11832
11833 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11834 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11835 IEM_MC_ADVANCE_RIP_AND_FINISH();
11836 IEM_MC_END();
11837 break;
11838
11839 case IEMMODE_64BIT:
11840 IEM_MC_BEGIN(0, 2);
11841 IEM_MC_LOCAL(uint64_t, u64Value);
11842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11843
11844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11846 IEMOP_HLP_RAISE_UD_IF_MISSING_GUEST_FEATURE(pVCpu, fSse2);
11847
11848 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11849 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11850 IEM_MC_ADVANCE_RIP_AND_FINISH();
11851 IEM_MC_END();
11852 break;
11853
11854 case IEMMODE_16BIT:
11855 /** @todo check this form. */
11856 return IEMOP_RAISE_INVALID_OPCODE();
11857
11858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11859 }
11860 }
11861 else
11862 return IEMOP_RAISE_INVALID_OPCODE();
11863}
11864
11865
11866/* Opcode 0x66 0x0f 0xc3 - invalid */
11867/* Opcode 0xf3 0x0f 0xc3 - invalid */
11868/* Opcode 0xf2 0x0f 0xc3 - invalid */
11869
11870
11871/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11872FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11873{
11874 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
11875 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11876 if (IEM_IS_MODRM_REG_MODE(bRm))
11877 {
11878 /*
11879 * Register, register.
11880 */
11881 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11883 IEM_MC_BEGIN(3, 0);
11884 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11885 IEM_MC_ARG(uint16_t, u16Src, 1);
11886 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11887 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11888 IEM_MC_PREPARE_FPU_USAGE();
11889 IEM_MC_FPU_TO_MMX_MODE();
11890 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11891 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11892 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11893 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11894 IEM_MC_ADVANCE_RIP_AND_FINISH();
11895 IEM_MC_END();
11896 }
11897 else
11898 {
11899 /*
11900 * Register, memory.
11901 */
11902 IEM_MC_BEGIN(3, 1);
11903 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11904 IEM_MC_ARG(uint16_t, u16Src, 1);
11905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11906
11907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11908 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11909 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11911 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11912 IEM_MC_PREPARE_FPU_USAGE();
11913 IEM_MC_FPU_TO_MMX_MODE();
11914
11915 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11916 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11917 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11918 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11919 IEM_MC_ADVANCE_RIP_AND_FINISH();
11920 IEM_MC_END();
11921 }
11922}
11923
11924
11925/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11926FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11927{
11928 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11929 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11930 if (IEM_IS_MODRM_REG_MODE(bRm))
11931 {
11932 /*
11933 * Register, register.
11934 */
11935 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11937 IEM_MC_BEGIN(3, 0);
11938 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11939 IEM_MC_ARG(uint16_t, u16Src, 1);
11940 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11941 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11942 IEM_MC_PREPARE_SSE_USAGE();
11943 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11944 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11945 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11946 IEM_MC_ADVANCE_RIP_AND_FINISH();
11947 IEM_MC_END();
11948 }
11949 else
11950 {
11951 /*
11952 * Register, memory.
11953 */
11954 IEM_MC_BEGIN(3, 2);
11955 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11956 IEM_MC_ARG(uint16_t, u16Src, 1);
11957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11958
11959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11960 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11961 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11963 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11964 IEM_MC_PREPARE_SSE_USAGE();
11965
11966 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11967 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11968 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11969 IEM_MC_ADVANCE_RIP_AND_FINISH();
11970 IEM_MC_END();
11971 }
11972}
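
/*
 * Both pinsrw forms above replace a single 16-bit lane selected by the
 * immediate and leave the rest of the destination untouched (imm8 & 3 for the
 * MMX form, imm8 & 7 for the XMM form).  Minimal sketch of the XMM case,
 * illustration only:
 */
#if 0
# include <stdint.h>
static void pinsrwSketch(uint16_t au16Dst[8], uint16_t u16Src, uint8_t bImm)
{
    au16Dst[bImm & 7] = u16Src;     /* the other lanes are preserved */
}
#endif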
11973
11974
11975/* Opcode 0xf3 0x0f 0xc4 - invalid */
11976/* Opcode 0xf2 0x0f 0xc4 - invalid */
11977
11978
11979/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11980FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11981{
11982 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
11983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11984 if (IEM_IS_MODRM_REG_MODE(bRm))
11985 {
11986 /*
11987 * Greg32, MMX, imm8.
11988 */
11989 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11991 IEM_MC_BEGIN(3, 1);
11992 IEM_MC_LOCAL(uint16_t, u16Dst);
11993 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11994 IEM_MC_ARG(uint64_t, u64Src, 1);
11995 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11996 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11997 IEM_MC_PREPARE_FPU_USAGE();
11998 IEM_MC_FPU_TO_MMX_MODE();
11999 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12000 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
12001 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12002 IEM_MC_ADVANCE_RIP_AND_FINISH();
12003 IEM_MC_END();
12004 }
12005 /* No memory operand. */
12006 else
12007 return IEMOP_RAISE_INVALID_OPCODE();
12008}
12009
12010
12011/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12012FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12013{
12014 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12015 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12016 if (IEM_IS_MODRM_REG_MODE(bRm))
12017 {
12018 /*
12019 * Greg32, XMM, imm8.
12020 */
12021 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12023 IEM_MC_BEGIN(3, 1);
12024 IEM_MC_LOCAL(uint16_t, u16Dst);
12025 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12026 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12027 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12028 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12029 IEM_MC_PREPARE_SSE_USAGE();
12030 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12031 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12032 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12033 IEM_MC_ADVANCE_RIP_AND_FINISH();
12034 IEM_MC_END();
12035 }
12036 /* No memory operand. */
12037 else
12038 return IEMOP_RAISE_INVALID_OPCODE();
12039}
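
/*
 * pextrw is the extraction counterpart: the selected 16-bit lane is zero
 * extended into the 32-bit general register.  Minimal sketch of the XMM case,
 * illustration only:
 */
#if 0
# include <stdint.h>
static uint32_t pextrwSketch(uint16_t const au16Src[8], uint8_t bImm)
{
    return au16Src[bImm & 7];       /* zero-extended into Gd */
}
#endif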
12040
12041
12042/* Opcode 0xf3 0x0f 0xc5 - invalid */
12043/* Opcode 0xf2 0x0f 0xc5 - invalid */
12044
12045
12046/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12047FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12048{
12049 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12051 if (IEM_IS_MODRM_REG_MODE(bRm))
12052 {
12053 /*
12054 * XMM, XMM, imm8.
12055 */
12056 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12058 IEM_MC_BEGIN(3, 0);
12059 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12060 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12061 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12062 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12063 IEM_MC_PREPARE_SSE_USAGE();
12064 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12065 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12066 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12067 IEM_MC_ADVANCE_RIP_AND_FINISH();
12068 IEM_MC_END();
12069 }
12070 else
12071 {
12072 /*
12073 * XMM, [mem128], imm8.
12074 */
12075 IEM_MC_BEGIN(3, 2);
12076 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12077 IEM_MC_LOCAL(RTUINT128U, uSrc);
12078 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12080
12081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12082 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12083 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12085 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12086 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12087
12088 IEM_MC_PREPARE_SSE_USAGE();
12089 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12090 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12091
12092 IEM_MC_ADVANCE_RIP_AND_FINISH();
12093 IEM_MC_END();
12094 }
12095}
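
/*
 * The shufps immediate selects the two low result dwords from the destination
 * and the two high result dwords from the source, two selector bits per
 * element.  Minimal sketch (illustration only; assumes the buffers do not
 * alias, whereas the register form uses the original value for both operands
 * when they are the same register):
 */
#if 0
# include <stdint.h>
static void shufpsSketch(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
{
    uint32_t const au32OldDst[4] = { au32Dst[0], au32Dst[1], au32Dst[2], au32Dst[3] };
    au32Dst[0] = au32OldDst[ bImm       & 3];
    au32Dst[1] = au32OldDst[(bImm >> 2) & 3];
    au32Dst[2] = au32Src[   (bImm >> 4) & 3];
    au32Dst[3] = au32Src[   (bImm >> 6) & 3];
}
#endif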
12096
12097
12098/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12099FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12100{
12101 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12103 if (IEM_IS_MODRM_REG_MODE(bRm))
12104 {
12105 /*
12106 * XMM, XMM, imm8.
12107 */
12108 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12110 IEM_MC_BEGIN(3, 0);
12111 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12112 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12113 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12114 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12115 IEM_MC_PREPARE_SSE_USAGE();
12116 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12117 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12118 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12119 IEM_MC_ADVANCE_RIP_AND_FINISH();
12120 IEM_MC_END();
12121 }
12122 else
12123 {
12124 /*
12125 * XMM, [mem128], imm8.
12126 */
12127 IEM_MC_BEGIN(3, 2);
12128 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12129 IEM_MC_LOCAL(RTUINT128U, uSrc);
12130 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12132
12133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12134 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12135 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12137 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12138 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12139
12140 IEM_MC_PREPARE_SSE_USAGE();
12141 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12142 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12143
12144 IEM_MC_ADVANCE_RIP_AND_FINISH();
12145 IEM_MC_END();
12146 }
12147}
12148
12149
12150/* Opcode 0xf3 0x0f 0xc6 - invalid */
12151/* Opcode 0xf2 0x0f 0xc6 - invalid */
12152
12153
12154/** Opcode 0x0f 0xc7 !11/1. */
12155FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12156{
12157 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12158
12159 IEM_MC_BEGIN(4, 3);
12160 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12161 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12162 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12163 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12164 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12165 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12167
12168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12169 IEMOP_HLP_DONE_DECODING();
12170 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12171
12172 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12173 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12174 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12175
12176 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12177 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12178 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12179
12180 IEM_MC_FETCH_EFLAGS(EFlags);
12181 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12182 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12183 else
12184 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12185
12186 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
12187 IEM_MC_COMMIT_EFLAGS(EFlags);
12188 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12189 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12190 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12191 } IEM_MC_ENDIF();
12192 IEM_MC_ADVANCE_RIP_AND_FINISH();
12193
12194 IEM_MC_END();
12195}
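
/* Illustrative sketch (not compiled): a plain C model of the cmpxchg8b
   semantics the MC block above emulates. The helper name and its flattened
   parameter list are made up for this sketch; the real worker takes the
   mapped memory, the EDX:EAX and ECX:EBX pairs and EFLAGS by reference and
   also comes in a locked variant. */
#if 0
static void iemSketchCmpXchg8b(uint64_t *puMem, uint32_t *puEax, uint32_t *puEdx,
                               uint32_t uEbx, uint32_t uEcx, bool *pfZF)
{
    uint64_t const uCmp = ((uint64_t)*puEdx << 32) | *puEax;
    if (*puMem == uCmp)
    {
        *puMem = ((uint64_t)uEcx << 32) | uEbx;    /* store ECX:EBX on match */
        *pfZF  = true;
    }
    else
    {
        *puEax = (uint32_t)*puMem;                 /* low dword back to EAX */
        *puEdx = (uint32_t)(*puMem >> 32);         /* high dword back to EDX */
        *pfZF  = false;
    }
}
#endif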
12196
12197
12198/** Opcode REX.W 0x0f 0xc7 !11/1. */
12199FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12200{
12201 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12202 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12203 {
12204 IEM_MC_BEGIN(4, 3);
12205 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12206 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12207 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12208 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12209 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12210 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12212
12213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12214 IEMOP_HLP_DONE_DECODING();
12215 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12216 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12217
12218 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12219 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12220 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12221
12222 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12223 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12224 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12225
12226 IEM_MC_FETCH_EFLAGS(EFlags);
12227
12228#ifdef RT_ARCH_AMD64 /* some code duplication here because IEMAllInstructionsPython.py cannot parse if/else/#if spaghetti. */
12229 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12230 {
12231 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12232 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12233 else
12234 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12235 }
12236 else
12237 { /* (see comments in #else case below) */
12238 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12239 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12240 else
12241 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12242 }
12243
12244#elif defined(RT_ARCH_ARM64)
12245 /** @todo may require fallback for unaligned accesses... */
12246 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12247 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12248 else
12249 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12250
12251#else
12252 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12253       accesses which are not atomic as a whole. That works fine in a uni-CPU
12254       guest configuration (ignoring DMA). If guest SMP is active we have no
12255       choice but to use a rendezvous callback here. Sigh. */
12256 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12257 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12258 else
12259 {
12260 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12261 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12262 }
12263#endif
12264
12265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12266 IEM_MC_COMMIT_EFLAGS(EFlags);
12267 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12268 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12269 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12270 } IEM_MC_ENDIF();
12271 IEM_MC_ADVANCE_RIP_AND_FINISH();
12272
12273 IEM_MC_END();
12274 }
12275 Log(("cmpxchg16b -> #UD\n"));
12276 return IEMOP_RAISE_INVALID_OPCODE();
12277}
12278
12279FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12280{
12281 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12282 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12283 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12284}
12285
12286
12287/** Opcode 0x0f 0xc7 11/6. */
12288FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12289{
12290 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12291 return IEMOP_RAISE_INVALID_OPCODE();
12292
12293 if (IEM_IS_MODRM_REG_MODE(bRm))
12294 {
12295 /* register destination. */
12296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12297 switch (pVCpu->iem.s.enmEffOpSize)
12298 {
12299 case IEMMODE_16BIT:
12300 IEM_MC_BEGIN(2, 0);
12301 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12302 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12303
12304 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12305 IEM_MC_REF_EFLAGS(pEFlags);
12306 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
12307 pu16Dst, pEFlags);
12308
12309 IEM_MC_ADVANCE_RIP_AND_FINISH();
12310 IEM_MC_END();
12311 break;
12312
12313 case IEMMODE_32BIT:
12314 IEM_MC_BEGIN(2, 0);
12315 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12316 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12317
12318 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12319 IEM_MC_REF_EFLAGS(pEFlags);
12320 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
12321 pu32Dst, pEFlags);
12322
12323 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12324 IEM_MC_ADVANCE_RIP_AND_FINISH();
12325 IEM_MC_END();
12326 break;
12327
12328 case IEMMODE_64BIT:
12329 IEM_MC_BEGIN(2, 0);
12330 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12331 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12332
12333 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12334 IEM_MC_REF_EFLAGS(pEFlags);
12335 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
12336 pu64Dst, pEFlags);
12337
12338 IEM_MC_ADVANCE_RIP_AND_FINISH();
12339 IEM_MC_END();
12340 break;
12341
12342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12343 }
12344 }
12345 /* Register only. */
12346 else
12347 return IEMOP_RAISE_INVALID_OPCODE();
12348}
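
/* Illustrative sketch (not compiled): the architectural contract the rdrand
   workers above implement, flattened into plain C. The function name is
   invented; fOk stands for hardware entropy being available. CF signals
   success, the other status flags are cleared, and the 32-bit variant zero
   extends into the high dword (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF above). */
#if 0
static void iemSketchRdRandU32(uint64_t *pu64Greg, uint32_t *pfEFlags, uint32_t uValue, bool fOk)
{
    *pu64Greg = fOk ? uValue : 0;   /* writing the 32-bit result zero extends */
    *pfEFlags &= ~(uint32_t)(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    if (fOk)
        *pfEFlags |= X86_EFL_CF;    /* CF=1: valid random value delivered */
}
#endif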
12349
12350/** Opcode 0x0f 0xc7 !11/6. */
12351#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12352FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12353{
12354 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12355 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12356 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12357 IEM_MC_BEGIN(2, 0);
12358 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12359 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12361 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12362 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12363 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12364 IEM_MC_END();
12365}
12366#else
12367FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12368#endif
12369
12370/** Opcode 0x66 0x0f 0xc7 !11/6. */
12371#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12372FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12373{
12374 IEMOP_MNEMONIC(vmclear, "vmclear");
12375 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12376 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12377 IEM_MC_BEGIN(2, 0);
12378 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12379 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12381 IEMOP_HLP_DONE_DECODING();
12382 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12383 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12384 IEM_MC_END();
12385}
12386#else
12387FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12388#endif
12389
12390/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12391#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12392FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12393{
12394 IEMOP_MNEMONIC(vmxon, "vmxon");
12395 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12396 IEM_MC_BEGIN(2, 0);
12397 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12398 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12400 IEMOP_HLP_DONE_DECODING();
12401 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12402 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12403 IEM_MC_END();
12404}
12405#else
12406FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12407#endif
12408
12409/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12410#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12411FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12412{
12413 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12414 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12415 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12416 IEM_MC_BEGIN(2, 0);
12417 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12418 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12419 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12420 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12421 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12422 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12423 IEM_MC_END();
12424}
12425#else
12426FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12427#endif
12428
12429/** Opcode 0x0f 0xc7 11/7. */
12430FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12431{
12432 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12433 return IEMOP_RAISE_INVALID_OPCODE();
12434
12435 if (IEM_IS_MODRM_REG_MODE(bRm))
12436 {
12437 /* register destination. */
12438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12439 switch (pVCpu->iem.s.enmEffOpSize)
12440 {
12441 case IEMMODE_16BIT:
12442 IEM_MC_BEGIN(2, 0);
12443 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12444 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12445
12446 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12447 IEM_MC_REF_EFLAGS(pEFlags);
12448 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
12449 pu16Dst, pEFlags);
12450
12451 IEM_MC_ADVANCE_RIP_AND_FINISH();
12452 IEM_MC_END();
12453 break;
12454
12455 case IEMMODE_32BIT:
12456 IEM_MC_BEGIN(2, 0);
12457 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12458 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12459
12460 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12461 IEM_MC_REF_EFLAGS(pEFlags);
12462 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
12463 pu32Dst, pEFlags);
12464
12465 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12466 IEM_MC_ADVANCE_RIP_AND_FINISH();
12467 IEM_MC_END();
12468 break;
12469
12470 case IEMMODE_64BIT:
12471 IEM_MC_BEGIN(2, 0);
12472 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12473 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12474
12475 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12476 IEM_MC_REF_EFLAGS(pEFlags);
12477 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
12478 pu64Dst, pEFlags);
12479
12480 IEM_MC_ADVANCE_RIP_AND_FINISH();
12481 IEM_MC_END();
12482 break;
12483
12484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12485 }
12486 }
12487 /* Register only. */
12488 else
12489 return IEMOP_RAISE_INVALID_OPCODE();
12490}
12491
12492/**
12493 * Group 9 jump table for register variant.
12494 */
12495IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12496{ /* pfx: none, 066h, 0f3h, 0f2h */
12497 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12498 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12499 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12500 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12501 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12502 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12503 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12504 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12505};
12506AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12507
12508
12509/**
12510 * Group 9 jump table for memory variant.
12511 */
12512IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12513{ /* pfx: none, 066h, 0f3h, 0f2h */
12514 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12515 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12516 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12517 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12518 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12519 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12520 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12521 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12522};
12523AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12524
12525
12526/** Opcode 0x0f 0xc7. */
12527FNIEMOP_DEF(iemOp_Grp9)
12528{
12529 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12530 if (IEM_IS_MODRM_REG_MODE(bRm))
12531 /* register, register */
12532 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12533 + pVCpu->iem.s.idxPrefix], bRm);
12534 /* memory, register */
12535 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12536 + pVCpu->iem.s.idxPrefix], bRm);
12537}
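
/* Illustrative sketch (not compiled): the dispatch above indexes the tables
   as reg * 4 + prefix column (0=none, 1=066h, 2=0f3h, 3=0f2h). The function
   name is invented; e.g. a 066h-prefixed mod!=11 /6 resolves to
   iemOp_Grp9_vmclear_Mq. */
#if 0
static PFNIEMOPRM iemSketchGrp9MemLookup(uint8_t bRm, uint8_t idxPrefix)
{
    uint8_t const iReg = IEM_GET_MODRM_REG_8(bRm);      /* ModR/M bits 5:3 */
    return g_apfnGroup9MemReg[iReg * 4 + idxPrefix];
}
#endif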
12538
12539
12540/**
12541 * Common 'bswap register' helper.
12542 */
12543FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12544{
12545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12546 switch (pVCpu->iem.s.enmEffOpSize)
12547 {
12548 case IEMMODE_16BIT:
12549 IEM_MC_BEGIN(1, 0);
12550 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12551 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12552 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12553 IEM_MC_ADVANCE_RIP_AND_FINISH();
12554 IEM_MC_END();
12555 break;
12556
12557 case IEMMODE_32BIT:
12558 IEM_MC_BEGIN(1, 0);
12559 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12560 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12561 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12562 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12563 IEM_MC_ADVANCE_RIP_AND_FINISH();
12564 IEM_MC_END();
12565 break;
12566
12567 case IEMMODE_64BIT:
12568 IEM_MC_BEGIN(1, 0);
12569 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12570 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12571 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12572 IEM_MC_ADVANCE_RIP_AND_FINISH();
12573 IEM_MC_END();
12574 break;
12575
12576 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12577 }
12578}
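
/* Illustrative sketch (not compiled): the byte order reversal performed by
   the 32-bit worker called above, written out in plain C; IPRT's
   RT_BSWAP_U32 expresses the same operation. */
#if 0
static uint32_t iemSketchBswapU32(uint32_t u32)
{
    return ((u32 & UINT32_C(0x000000ff)) << 24)
         | ((u32 & UINT32_C(0x0000ff00)) <<  8)
         | ((u32 & UINT32_C(0x00ff0000)) >>  8)
         | ((u32 & UINT32_C(0xff000000)) >> 24);
}
#endif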
12579
12580
12581/** Opcode 0x0f 0xc8. */
12582FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12583{
12584 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12585 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12586       prefix, but REX.B appears to be the correct prefix. For a parallel
12587       case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12588 IEMOP_HLP_MIN_486();
12589 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12590}
12591
12592
12593/** Opcode 0x0f 0xc9. */
12594FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12595{
12596 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12597 IEMOP_HLP_MIN_486();
12598 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12599}
12600
12601
12602/** Opcode 0x0f 0xca. */
12603FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12604{
12605 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12606 IEMOP_HLP_MIN_486();
12607 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12608}
12609
12610
12611/** Opcode 0x0f 0xcb. */
12612FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12613{
12614 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12615 IEMOP_HLP_MIN_486();
12616 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12617}
12618
12619
12620/** Opcode 0x0f 0xcc. */
12621FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12622{
12623 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12624 IEMOP_HLP_MIN_486();
12625 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12626}
12627
12628
12629/** Opcode 0x0f 0xcd. */
12630FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12631{
12632 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12633 IEMOP_HLP_MIN_486();
12634 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12635}
12636
12637
12638/** Opcode 0x0f 0xce. */
12639FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12640{
12641 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12642 IEMOP_HLP_MIN_486();
12643 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12644}
12645
12646
12647/** Opcode 0x0f 0xcf. */
12648FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12649{
12650 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12651 IEMOP_HLP_MIN_486();
12652 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12653}
12654
12655
12656/* Opcode 0x0f 0xd0 - invalid */
12657
12658
12659/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12660FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12661{
12662 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12663 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12664}
12665
12666
12667/* Opcode 0xf3 0x0f 0xd0 - invalid */
12668
12669
12670/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12671FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12672{
12673 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12674 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12675}
12676
12677
12678
12679/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12680FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12681{
12682 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12683 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12684}
12685
12686/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12687FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12688{
12689 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12690 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12691}
12692
12693/* Opcode 0xf3 0x0f 0xd1 - invalid */
12694/* Opcode 0xf2 0x0f 0xd1 - invalid */
12695
12696/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12697FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12698{
12699 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12700 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12701}
12702
12703
12704/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12705FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12706{
12707 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12708 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12709}
12710
12711
12712/* Opcode 0xf3 0x0f 0xd2 - invalid */
12713/* Opcode 0xf2 0x0f 0xd2 - invalid */
12714
12715/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12716FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12717{
12718 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12719 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12720}
12721
12722
12723/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12724FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12725{
12726 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12727 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12728}
12729
12730
12731/* Opcode 0xf3 0x0f 0xd3 - invalid */
12732/* Opcode 0xf2 0x0f 0xd3 - invalid */
12733
12734
12735/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12736FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12737{
12738 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12739 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12740}
12741
12742
12743/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12744FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12745{
12746 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12747 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12748}
12749
12750
12751/* Opcode 0xf3 0x0f 0xd4 - invalid */
12752/* Opcode 0xf2 0x0f 0xd4 - invalid */
12753
12754/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12755FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12756{
12757 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12758 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12759}
12760
12761/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12762FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12763{
12764 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12765 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12766}
12767
12768
12769/* Opcode 0xf3 0x0f 0xd5 - invalid */
12770/* Opcode 0xf2 0x0f 0xd5 - invalid */
12771
12772/* Opcode 0x0f 0xd6 - invalid */
12773
12774/**
12775 * @opcode 0xd6
12776 * @oppfx 0x66
12777 * @opcpuid sse2
12778 * @opgroup og_sse2_pcksclr_datamove
12779 * @opxcpttype none
12780 * @optest op1=-1 op2=2 -> op1=2
12781 * @optest op1=0 op2=-42 -> op1=-42
12782 */
12783FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12784{
12785 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12787 if (IEM_IS_MODRM_REG_MODE(bRm))
12788 {
12789 /*
12790 * Register, register.
12791 */
12792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12793 IEM_MC_BEGIN(0, 2);
12794 IEM_MC_LOCAL(uint64_t, uSrc);
12795
12796 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12798
12799 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12800 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12801
12802 IEM_MC_ADVANCE_RIP_AND_FINISH();
12803 IEM_MC_END();
12804 }
12805 else
12806 {
12807 /*
12808 * Memory, register.
12809 */
12810 IEM_MC_BEGIN(0, 2);
12811 IEM_MC_LOCAL(uint64_t, uSrc);
12812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12813
12814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12816 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12817 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12818
12819 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12820 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12821
12822 IEM_MC_ADVANCE_RIP_AND_FINISH();
12823 IEM_MC_END();
12824 }
12825}
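
/* Illustrative sketch (not compiled): the register form above copies the low
   qword and zero extends it into the full destination register, matching
   IEM_MC_STORE_XREG_U64_ZX_U128. The helper name is invented. */
#if 0
static void iemSketchMovqZx(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->s.Lo = puSrc->s.Lo;  /* copy the low qword */
    puDst->s.Hi = 0;            /* zero the high qword */
}
#endif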
12826
12827
12828/**
12829 * @opcode 0xd6
12830 * @opcodesub 11 mr/reg
12831 * @oppfx f3
12832 * @opcpuid sse2
12833 * @opgroup og_sse2_simdint_datamove
12834 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12835 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12836 */
12837FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12838{
12839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12840 if (IEM_IS_MODRM_REG_MODE(bRm))
12841 {
12842 /*
12843 * Register, register.
12844 */
12845 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12847 IEM_MC_BEGIN(0, 1);
12848 IEM_MC_LOCAL(uint64_t, uSrc);
12849
12850 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12851 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12852 IEM_MC_FPU_TO_MMX_MODE();
12853
12854 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12855 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12856
12857 IEM_MC_ADVANCE_RIP_AND_FINISH();
12858 IEM_MC_END();
12859 }
12860
12861 /**
12862 * @opdone
12863 * @opmnemonic udf30fd6mem
12864 * @opcode 0xd6
12865 * @opcodesub !11 mr/reg
12866 * @oppfx f3
12867 * @opunused intel-modrm
12868 * @opcpuid sse
12869 * @optest ->
12870 */
12871 else
12872 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12873}
12874
12875
12876/**
12877 * @opcode 0xd6
12878 * @opcodesub 11 mr/reg
12879 * @oppfx f2
12880 * @opcpuid sse2
12881 * @opgroup og_sse2_simdint_datamove
12882 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12883 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12884 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12885 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12886 * @optest op1=-42 op2=0xfedcba9876543210
12887 * -> op1=0xfedcba9876543210 ftw=0xff
12888 */
12889FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12890{
12891 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12892 if (IEM_IS_MODRM_REG_MODE(bRm))
12893 {
12894 /*
12895 * Register, register.
12896 */
12897 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12899 IEM_MC_BEGIN(0, 1);
12900 IEM_MC_LOCAL(uint64_t, uSrc);
12901
12902 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12903 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12904 IEM_MC_FPU_TO_MMX_MODE();
12905
12906 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
12907 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12908
12909 IEM_MC_ADVANCE_RIP_AND_FINISH();
12910 IEM_MC_END();
12911 }
12912
12913 /**
12914 * @opdone
12915 * @opmnemonic udf20fd6mem
12916 * @opcode 0xd6
12917 * @opcodesub !11 mr/reg
12918 * @oppfx f2
12919 * @opunused intel-modrm
12920 * @opcpuid sse
12921 * @optest ->
12922 */
12923 else
12924 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12925}
12926
12927
12928/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12929FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12930{
12931 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12932 /* Docs say register only. */
12933 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12934 {
12935 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12936 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
12937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12938 IEM_MC_BEGIN(2, 0);
12939 IEM_MC_ARG(uint64_t *, puDst, 0);
12940 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12941 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12942 IEM_MC_PREPARE_FPU_USAGE();
12943 IEM_MC_FPU_TO_MMX_MODE();
12944
12945 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12946 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12947 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12948
12949 IEM_MC_ADVANCE_RIP_AND_FINISH();
12950 IEM_MC_END();
12951 }
12952 else
12953 return IEMOP_RAISE_INVALID_OPCODE();
12954}
12955
12956
12957/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12958FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12959{
12960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12961 /* Docs say register only. */
12962 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12963 {
12964 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12965 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
12966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12967 IEM_MC_BEGIN(2, 0);
12968 IEM_MC_ARG(uint64_t *, puDst, 0);
12969 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12970 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12971 IEM_MC_PREPARE_SSE_USAGE();
12972 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12973 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12974 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12975 IEM_MC_ADVANCE_RIP_AND_FINISH();
12976 IEM_MC_END();
12977 }
12978 else
12979 return IEMOP_RAISE_INVALID_OPCODE();
12980}
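
/* Illustrative sketch (not compiled): what the pmovmskb workers above
   compute - one result bit per source byte, taken from that byte's most
   significant bit. The function name is invented. */
#if 0
static uint32_t iemSketchPmovmskbU128(PCRTUINT128U puSrc)
{
    uint32_t fMask = 0;
    for (unsigned iByte = 0; iByte < 16; iByte++)
        fMask |= (uint32_t)(puSrc->au8[iByte] >> 7) << iByte;   /* MSB -> bit iByte */
    return fMask;
}
#endif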
12981
12982
12983/* Opcode 0xf3 0x0f 0xd7 - invalid */
12984/* Opcode 0xf2 0x0f 0xd7 - invalid */
12985
12986
12987/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12988FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12989{
12990 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12991 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12992}
12993
12994
12995/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12996FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12997{
12998 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12999 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13000}
13001
13002
13003/* Opcode 0xf3 0x0f 0xd8 - invalid */
13004/* Opcode 0xf2 0x0f 0xd8 - invalid */
13005
13006/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13007FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13008{
13009 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13010 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13011}
13012
13013
13014/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13015FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13016{
13017 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13018 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13019}
13020
13021
13022/* Opcode 0xf3 0x0f 0xd9 - invalid */
13023/* Opcode 0xf2 0x0f 0xd9 - invalid */
13024
13025/** Opcode 0x0f 0xda - pminub Pq, Qq */
13026FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13027{
13028 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13029 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13030}
13031
13032
13033/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13034FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13035{
13036 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13037 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13038}
13039
13040/* Opcode 0xf3 0x0f 0xda - invalid */
13041/* Opcode 0xf2 0x0f 0xda - invalid */
13042
13043/** Opcode 0x0f 0xdb - pand Pq, Qq */
13044FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13045{
13046 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13047 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13048}
13049
13050
13051/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13052FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13053{
13054 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13055 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13056}
13057
13058
13059/* Opcode 0xf3 0x0f 0xdb - invalid */
13060/* Opcode 0xf2 0x0f 0xdb - invalid */
13061
13062/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13063FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13064{
13065 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13066 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13067}
13068
13069
13070/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13071FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13072{
13073 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13074 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13075}
13076
13077
13078/* Opcode 0xf3 0x0f 0xdc - invalid */
13079/* Opcode 0xf2 0x0f 0xdc - invalid */
13080
13081/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13082FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13083{
13084 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13085 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13086}
13087
13088
13089/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13090FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13091{
13092 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13093 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13094}
13095
13096
13097/* Opcode 0xf3 0x0f 0xdd - invalid */
13098/* Opcode 0xf2 0x0f 0xdd - invalid */
13099
13100/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13101FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13102{
13103 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13104 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13105}
13106
13107
13108/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13109FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13110{
13111 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13112 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13113}
13114
13115/* Opcode 0xf3 0x0f 0xde - invalid */
13116/* Opcode 0xf2 0x0f 0xde - invalid */
13117
13118
13119/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13120FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13121{
13122 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13123 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13124}
13125
13126
13127/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13128FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13129{
13130 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13131 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13132}
13133
13134
13135/* Opcode 0xf3 0x0f 0xdf - invalid */
13136/* Opcode 0xf2 0x0f 0xdf - invalid */
13137
13138/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13139FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13140{
13141 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13142 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13143}
13144
13145
13146/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13147FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13148{
13149 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13150 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13151}
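
/* Illustrative sketch (not compiled): the rounding average pavgb/pavgw
   compute per element: tmp = src1 + src2 + 1, result = tmp >> 1. The
   function name is invented. */
#if 0
static uint8_t iemSketchPavgb(uint8_t uSrc1, uint8_t uSrc2)
{
    return (uint8_t)(((uint16_t)uSrc1 + uSrc2 + 1) >> 1);   /* widen to avoid carry loss */
}
#endif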
13152
13153
13154/* Opcode 0xf3 0x0f 0xe0 - invalid */
13155/* Opcode 0xf2 0x0f 0xe0 - invalid */
13156
13157/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13158FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13159{
13160 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13161 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13162}
13163
13164
13165/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13166FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13167{
13168 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13169 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13170}
13171
13172
13173/* Opcode 0xf3 0x0f 0xe1 - invalid */
13174/* Opcode 0xf2 0x0f 0xe1 - invalid */
13175
13176/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13177FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13178{
13179 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13180 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13181}
13182
13183
13184/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13185FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13186{
13187 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13188 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13189}
13190
13191
13192/* Opcode 0xf3 0x0f 0xe2 - invalid */
13193/* Opcode 0xf2 0x0f 0xe2 - invalid */
13194
13195/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13196FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13197{
13198 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13199 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13200}
13201
13202
13203/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13204FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13205{
13206 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13207 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13208}
13209
13210
13211/* Opcode 0xf3 0x0f 0xe3 - invalid */
13212/* Opcode 0xf2 0x0f 0xe3 - invalid */
13213
13214/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13215FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13216{
13217 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13218 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13219}
13220
13221
13222/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13223FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13224{
13225 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13226 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13227}
13228
13229
13230/* Opcode 0xf3 0x0f 0xe4 - invalid */
13231/* Opcode 0xf2 0x0f 0xe4 - invalid */
13232
13233/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13234FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13235{
13236 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13237 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13238}
13239
13240
13241/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13242FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13243{
13244 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13245 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13246}
13247
13248
13249/* Opcode 0xf3 0x0f 0xe5 - invalid */
13250/* Opcode 0xf2 0x0f 0xe5 - invalid */
13251/* Opcode 0x0f 0xe6 - invalid */
13252
13253
13254/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13255FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13256{
13257 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13258 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13259}
13260
13261
13262/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13263FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13264{
13265 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13266 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13267}
13268
13269
13270/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13271FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13272{
13273 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13274 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13275}
13276
13277
13278/**
13279 * @opcode 0xe7
13280 * @opcodesub !11 mr/reg
13281 * @oppfx none
13282 * @opcpuid sse
13283 * @opgroup og_sse1_cachect
13284 * @opxcpttype none
13285 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13286 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13287 */
13288FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13289{
13290 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13292 if (IEM_IS_MODRM_MEM_MODE(bRm))
13293 {
13294 /* Register, memory. */
13295 IEM_MC_BEGIN(0, 2);
13296 IEM_MC_LOCAL(uint64_t, uSrc);
13297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13298
13299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13301 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13302 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13303 IEM_MC_FPU_TO_MMX_MODE();
13304
13305 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13306 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13307
13308 IEM_MC_ADVANCE_RIP_AND_FINISH();
13309 IEM_MC_END();
13310 }
13311 /**
13312 * @opdone
13313 * @opmnemonic ud0fe7reg
13314 * @opcode 0xe7
13315 * @opcodesub 11 mr/reg
13316 * @oppfx none
13317 * @opunused immediate
13318 * @opcpuid sse
13319 * @optest ->
13320 */
13321 else
13322 return IEMOP_RAISE_INVALID_OPCODE();
13323}
13324
13325/**
13326 * @opcode 0xe7
13327 * @opcodesub !11 mr/reg
13328 * @oppfx 0x66
13329 * @opcpuid sse2
13330 * @opgroup og_sse2_cachect
13331 * @opxcpttype 1
13332 * @optest op1=-1 op2=2 -> op1=2
13333 * @optest op1=0 op2=-42 -> op1=-42
13334 */
13335FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13336{
13337 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13339 if (IEM_IS_MODRM_MEM_MODE(bRm))
13340 {
13341 /* Register, memory. */
13342 IEM_MC_BEGIN(0, 2);
13343 IEM_MC_LOCAL(RTUINT128U, uSrc);
13344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13345
13346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13348 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
13349 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13350
13351 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13352 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13353
13354 IEM_MC_ADVANCE_RIP_AND_FINISH();
13355 IEM_MC_END();
13356 }
13357
13358 /**
13359 * @opdone
13360 * @opmnemonic ud660fe7reg
13361 * @opcode 0xe7
13362 * @opcodesub 11 mr/reg
13363 * @oppfx 0x66
13364 * @opunused immediate
13365 * @opcpuid sse
13366 * @optest ->
13367 */
13368 else
13369 return IEMOP_RAISE_INVALID_OPCODE();
13370}
13371
13372/* Opcode 0xf3 0x0f 0xe7 - invalid */
13373/* Opcode 0xf2 0x0f 0xe7 - invalid */
13374
13375
13376/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13377FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13378{
13379 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13380 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13381}
13382
13383
13384/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13385FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13386{
13387 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13388 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13389}
13390
13391
13392/* Opcode 0xf3 0x0f 0xe8 - invalid */
13393/* Opcode 0xf2 0x0f 0xe8 - invalid */
13394
13395/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13396FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13397{
13398 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13399 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13400}
13401
13402
13403/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13404FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13405{
13406 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13407 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13408}
13409
13410
13411/* Opcode 0xf3 0x0f 0xe9 - invalid */
13412/* Opcode 0xf2 0x0f 0xe9 - invalid */
13413
13414
13415/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13416FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13417{
13418 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13419 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13420}
13421
13422
13423/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13424FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13425{
13426 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13427 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13428}
13429
13430
13431/* Opcode 0xf3 0x0f 0xea - invalid */
13432/* Opcode 0xf2 0x0f 0xea - invalid */
13433
13434
13435/** Opcode 0x0f 0xeb - por Pq, Qq */
13436FNIEMOP_DEF(iemOp_por_Pq_Qq)
13437{
13438 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13439 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13440}
13441
13442
13443/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13444FNIEMOP_DEF(iemOp_por_Vx_Wx)
13445{
13446 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13447 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13448}
13449
13450
13451/* Opcode 0xf3 0x0f 0xeb - invalid */
13452/* Opcode 0xf2 0x0f 0xeb - invalid */
13453
13454/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13455FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13456{
13457 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13458 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13459}
13460
13461
13462/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13463FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13464{
13465 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13466 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13467}
13468
13469
13470/* Opcode 0xf3 0x0f 0xec - invalid */
13471/* Opcode 0xf2 0x0f 0xec - invalid */
13472
13473/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13474FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13475{
13476 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13477 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13478}
13479
13480
13481/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13482FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13483{
13484 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13485 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13486}
13487
13488
13489/* Opcode 0xf3 0x0f 0xed - invalid */
13490/* Opcode 0xf2 0x0f 0xed - invalid */
13491
13492
13493/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13494FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13495{
13496 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13497 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13498}
13499
13500
13501/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13502FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13503{
13504 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13505 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13506}
13507
13508
13509/* Opcode 0xf3 0x0f 0xee - invalid */
13510/* Opcode 0xf2 0x0f 0xee - invalid */
13511
13512
13513/** Opcode 0x0f 0xef - pxor Pq, Qq */
13514FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13515{
13516 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13517 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13518}
13519
13520
13521/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13522FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13523{
13524 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13525 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13526}
13527
13528
13529/* Opcode 0xf3 0x0f 0xef - invalid */
13530/* Opcode 0xf2 0x0f 0xef - invalid */
13531
13532/* Opcode 0x0f 0xf0 - invalid */
13533/* Opcode 0x66 0x0f 0xf0 - invalid */
13534
13535
13536/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13537FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13538{
13539 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13541 if (IEM_IS_MODRM_REG_MODE(bRm))
13542 {
13543 /*
13544 * Register, register - (not implemented, assuming it raises \#UD).
13545 */
13546 return IEMOP_RAISE_INVALID_OPCODE();
13547 }
13548 else
13549 {
13550 /*
13551 * Register, memory.
13552 */
13553 IEM_MC_BEGIN(0, 2);
13554 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13556
13557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13559 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
13560 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13561 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13562 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13563
13564 IEM_MC_ADVANCE_RIP_AND_FINISH();
13565 IEM_MC_END();
13566 }
13567}
13568
13569
13570/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13571FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13572{
13573 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13574 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13575}
13576
13577
13578/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13579FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13580{
13581 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13582 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13583}
13584
13585
13586/* Opcode 0xf2 0x0f 0xf1 - invalid */
13587
13588/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13589FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13590{
13591 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13592 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13593}
13594
13595
13596/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13597FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13598{
13599 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13600 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13601}
13602
13603
13604/* Opcode 0xf2 0x0f 0xf2 - invalid */
13605
13606/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13607FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13608{
13609 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13610 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13611}
13612
13613
13614/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13615FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13616{
13617 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13618 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13619}
13620
13621/* Opcode 0xf2 0x0f 0xf3 - invalid */
13622
13623/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13624FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13625{
13626 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13627 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13628}
13629
13630
13631/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13632FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13633{
13634 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13635 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13636}
13637
13638
13639/* Opcode 0xf2 0x0f 0xf4 - invalid */
13640
13641/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13642FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13643{
13644 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13645 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13646}
13647
13648
13649/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13650FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13651{
13652 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13653 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13654}
13655
13656/* Opcode 0xf2 0x0f 0xf5 - invalid */
13657
13658/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13659FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13660{
13661 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13662 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13663}
13664
13665
13666/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13667FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13668{
13669 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13670 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13671}
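
/* Illustrative sketch (not compiled): the sum of absolute byte differences
   psadbw computes for each 8-byte lane; the 16-bit sum lands in the low word
   of the corresponding destination qword. The function name is invented. */
#if 0
static uint16_t iemSketchPsadbwLane(uint64_t uSrc1, uint64_t uSrc2)
{
    uint16_t uSum = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        uint8_t const b1 = (uint8_t)(uSrc1 >> (iByte * 8));
        uint8_t const b2 = (uint8_t)(uSrc2 >> (iByte * 8));
        uSum += b1 >= b2 ? (uint8_t)(b1 - b2) : (uint8_t)(b2 - b1);
    }
    return uSum;
}
#endif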
13672
13673
13674/* Opcode 0xf2 0x0f 0xf6 - invalid */
13675
13676/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13677FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13678/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13679FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13680/* Opcode 0xf2 0x0f 0xf7 - invalid */
13681
13682
13683/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13684FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13685{
13686 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13687 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13688}
13689
13690
13691/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13692FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13693{
13694 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13695 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13696}
13697
13698
13699/* Opcode 0xf2 0x0f 0xf8 - invalid */
13700
13701
13702/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13703FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13704{
13705 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13706 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13707}
13708
13709
13710/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13711FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13712{
13713 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13714 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13715}
13716
13717
13718/* Opcode 0xf2 0x0f 0xf9 - invalid */
13719
13720
13721/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13722FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13723{
13724 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13725 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13726}
13727
13728
13729/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13730FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13731{
13732 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13733 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13734}
13735
13736
13737/* Opcode 0xf2 0x0f 0xfa - invalid */
13738
13739
13740/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13741FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13742{
13743 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13744 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13745}
13746
13747
13748/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13749FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13750{
13751 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13752 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13753}
13754
13755
13756/* Opcode 0xf2 0x0f 0xfb - invalid */
13757
13758
13759/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13760FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13761{
13762 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13763 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13764}
13765
13766
13767/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13768FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13769{
13770 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13771 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13772}
13773
13774
13775/* Opcode 0xf2 0x0f 0xfc - invalid */
13776
13777
13778/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13779FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13780{
13781 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13782 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13783}
13784
13785
13786/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13787FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13788{
13789 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13790 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13791}
13792
13793
13794/* Opcode 0xf2 0x0f 0xfd - invalid */
13795
13796
13797/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13798FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13799{
13800 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13801 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13802}
13803
13804
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* Intel CPUs consume a ModR/M byte (and any memory operand it implies)
           before raising #UD, so decode it here to keep the instruction length
           right; other vendors fault on the opcode alone. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
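
/*
 * For illustration only: the 1024 entries above are 256 opcode bytes times
 * four prefix columns (none, 066h, 0f3h, 0f2h), so a decoder selects a
 * handler along the lines sketched below.  The bOpcode/idxPrefix parameters
 * are hypothetical stand-ins for the actual IEM decoder state; treat this as
 * a sketch of the indexing scheme, not the real lookup code.
 */
#if 0 /* illustration only */
DECLINLINE(PFNIEMOP) iemTwoByteLookupSketch(uint8_t bOpcode, uint8_t idxPrefix)
{
    Assert(idxPrefix < 4); /* one column per SIMD prefix: none, 066h, 0f3h, 0f2h */
    return g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif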

/** @} */
