VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h @ 100765

Last change on this file since 100765 was 100763, checked in by vboxsync, 17 months ago:

VMM/IEM: Fixed another regression from r158571 affecting xadd. bugref:10369

/* $Id: IEMAllInstTwoByte0f.cpp.h 100763 2023-08-01 08:42:07Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
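
/* Usage sketch (illustrative only; the handler and worker names here are
   assumed, not taken from this file): a two-operand MMX opcode of this shape
   would simply forward to the worker above together with the matching
   PFNIEMAIMPLMEDIAF2U64 implementation:

       FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
       {
           IEMOP_MNEMONIC(pxxx, "pxxx Pq,Qq");
           return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxxx_u64);
       }
*/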


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
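
/* The only difference from iemOpCommonMmx_FullFull_To_Full above is the call
   macro: IEM_MC_CALL_VOID_AIMPL_2 instead of IEM_MC_CALL_MMX_AIMPL_2, since a
   PFNIEMAIMPLMEDIAOPTF2U64 worker takes no FXSAVE state pointer and therefore
   needs nothing beyond the two operand references. */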


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
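
/* Instructions of this shape include the packed single-precision bitwise ops,
   e.g. ANDPS (0x0f 0x54), ANDNPS (0x0f 0x55), ORPS (0x0f 0x56) and XORPS
   (0x0f 0x57), which operate on the full 128 bits and raise no SIMD FP
   exceptions. */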


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
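
/* Typical encodings of this shape are the 128-bit logicals PAND (0x66 0x0f
   0xdb), POR (0x66 0x0f 0xeb) and PXOR (0x66 0x0f 0xef): xmm1, xmm2/mem128
   forms that never consult MXCSR, so a state-less worker suffices. */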


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
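
/* Example of this shape: PUNPCKLBW mm, mm/m32 (0x0f 0x60) - only the low
   half of the source is interleaved, so Intel documents the memory form as a
   32-bit read, matching the zero-extending fetch above. */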


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which SSE may read either the low 64 bits
 * or the full 128 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which SSE may read either the low 64 bits
 * or the full 128 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
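
/* Example of this shape: PUNPCKHBW mm, mm/m64 (0x0f 0x68) - although only
   the high dword of the source contributes to the result, the memory form is
   documented as a full 64-bit read, which the fetch above honours. */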


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where either the full 128 bits or only the upper
 * 64 bits may actually be read.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
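
/* Packed single-precision arithmetic fits this shape, e.g. ADDPS (0x0f 0x58),
   MULPS (0x0f 0x59) and SUBPS (0x0f 0x5c). These consult and update MXCSR,
   hence the IEMSSERESULT round trip and the deferred SIMD FP/#UD check after
   the call. */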


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * No alignment check is performed on the 32-bit memory operand.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
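
/* Scalar single-precision arithmetic uses this shape, e.g. ADDSS xmm1,
   xmm2/mem32 (0xf3 0x0f 0x58): the memory form reads just 32 bits, which is
   why the fetch above is a plain IEM_MC_FETCH_MEM_R32 without an alignment
   check. */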


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * No alignment check is performed on the 64-bit memory operand.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
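
/* Example of this shape: ADDSD xmm1, xmm2/mem64 (0xf2 0x0f 0x58), the
   double-precision scalar twin of the R32 worker above, reading a single
   64-bit value from memory with no 16-byte alignment restriction. */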


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where either the full 128 bits or only the upper
 * 64 bits may actually be read.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
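
/* The SSE3 horizontal operations fit this shape, e.g. HADDPS (0xf2 0x0f 0x7c),
   HSUBPS (0xf2 0x0f 0x7d) and their double-precision twins HADDPD/HSUBPD
   (0x66 0x0f 0x7c / 0x66 0x0f 0x7d): full 128-bit, MXCSR-aware operations. */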


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
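
/* Dispatch example: the table index is the ModR/M reg field (bits 5:3), so a
   ModR/M byte of 0xd0 (mod=3, reg=2, rm=0) selects iemOp_Grp6_lldt with AX as
   the selector operand. */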


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        /** @todo testcase: test prefixes and exceptions. currently not checking for the
         *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        /** @todo testcase: test prefixes and exceptions. currently not checking for the
         *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits (PE, MP, EM and TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}
1734
1735
1736/** Opcode 0x0f 0x01 /7. */
1737FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1738{
1739 IEMOP_MNEMONIC(invlpg, "invlpg");
1740 IEMOP_HLP_MIN_486();
1741 IEM_MC_BEGIN(1, 1);
1742 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1745 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
1746 IEM_MC_END();
1747}
1748
1749
1750/** Opcode 0x0f 0x01 0xf8. */
1751FNIEMOP_DEF(iemOp_Grp7_swapgs)
1752{
1753 IEMOP_MNEMONIC(swapgs, "swapgs");
1754 IEMOP_HLP_ONLY_64BIT();
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_swapgs);
1757}
1758
1759
1760/** Opcode 0x0f 0x01 0xf9. */
1761FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1762{
1763 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1765 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtscp);
1766}
1767
1768
1769/**
1770 * Group 7 jump table, memory variant.
1771 */
1772IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1773{
1774 iemOp_Grp7_sgdt,
1775 iemOp_Grp7_sidt,
1776 iemOp_Grp7_lgdt,
1777 iemOp_Grp7_lidt,
1778 iemOp_Grp7_smsw,
1779 iemOp_InvalidWithRM,
1780 iemOp_Grp7_lmsw,
1781 iemOp_Grp7_invlpg
1782};
1783
1784
1785/** Opcode 0x0f 0x01. */
1786FNIEMOP_DEF(iemOp_Grp7)
1787{
1788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1789 if (IEM_IS_MODRM_MEM_MODE(bRm))
1790 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1791
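 /* mod=11 (register) forms: the reg and r/m fields together select the instruction. */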
1792 switch (IEM_GET_MODRM_REG_8(bRm))
1793 {
1794 case 0:
1795 switch (IEM_GET_MODRM_RM_8(bRm))
1796 {
1797 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1798 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1799 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1800 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1801 }
1802 IEMOP_RAISE_INVALID_OPCODE_RET();
1803
1804 case 1:
1805 switch (IEM_GET_MODRM_RM_8(bRm))
1806 {
1807 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1808 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1809 }
1810 IEMOP_RAISE_INVALID_OPCODE_RET();
1811
1812 case 2:
1813 switch (IEM_GET_MODRM_RM_8(bRm))
1814 {
1815 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1816 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1817 }
1818 IEMOP_RAISE_INVALID_OPCODE_RET();
1819
1820 case 3:
1821 switch (IEM_GET_MODRM_RM_8(bRm))
1822 {
1823 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1824 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1825 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1826 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1827 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1828 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1829 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1830 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1831 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1832 }
1833
1834 case 4:
1835 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1836
1837 case 5:
1838 IEMOP_RAISE_INVALID_OPCODE_RET();
1839
1840 case 6:
1841 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1842
1843 case 7:
1844 switch (IEM_GET_MODRM_RM_8(bRm))
1845 {
1846 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1847 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1848 }
1849 IEMOP_RAISE_INVALID_OPCODE_RET();
1850
1851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1852 }
1853}
1854
1855/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
1856FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1857{
1858 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1860
1861 if (IEM_IS_MODRM_REG_MODE(bRm))
1862 {
1863 switch (pVCpu->iem.s.enmEffOpSize)
1864 {
1865 case IEMMODE_16BIT:
1866 {
1867 IEM_MC_BEGIN(3, 0);
1868 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1869 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1870 IEM_MC_ARG(uint16_t, u16Sel, 1);
1871 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1872
1873 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1874 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1875 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1876
1877 IEM_MC_END();
1878 }
1879
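 /* The 32-bit and 64-bit operand sizes share the 64-bit worker. */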
1880 case IEMMODE_32BIT:
1881 case IEMMODE_64BIT:
1882 {
1883 IEM_MC_BEGIN(3, 0);
1884 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1885 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1886 IEM_MC_ARG(uint16_t, u16Sel, 1);
1887 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1888
1889 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1890 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1891 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1892
1893 IEM_MC_END();
1894 }
1895
1896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1897 }
1898 }
1899 else
1900 {
1901 switch (pVCpu->iem.s.enmEffOpSize)
1902 {
1903 case IEMMODE_16BIT:
1904 {
1905 IEM_MC_BEGIN(3, 1);
1906 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1907 IEM_MC_ARG(uint16_t, u16Sel, 1);
1908 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1910
1911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1912 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1913
1914 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1915 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1916 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1917
1918 IEM_MC_END();
1919 }
1920
1921 case IEMMODE_32BIT:
1922 case IEMMODE_64BIT:
1923 {
1924 IEM_MC_BEGIN(3, 1);
1925 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1926 IEM_MC_ARG(uint16_t, u16Sel, 1);
1927 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1929
1930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1931 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1932/** @todo testcase: make sure it's a 16-bit read. */
1933
1934 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1935 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1936 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1937
1938 IEM_MC_END();
1939 }
1940
1941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1942 }
1943 }
1944}
1945
1946
1947
1948/** Opcode 0x0f 0x02. */
1949FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1950{
1951 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1952 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1953}
1954
1955
1956/** Opcode 0x0f 0x03. */
1957FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1958{
1959 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1960 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1961}
1962
1963
1964/** Opcode 0x0f 0x05. */
1965FNIEMOP_DEF(iemOp_syscall)
1966{
1967 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1969 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1970 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1971 iemCImpl_syscall);
1972}
1973
1974
1975/** Opcode 0x0f 0x06. */
1976FNIEMOP_DEF(iemOp_clts)
1977{
1978 IEMOP_MNEMONIC(clts, "clts");
1979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1980 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clts);
1981}
1982
1983
1984/** Opcode 0x0f 0x07. */
1985FNIEMOP_DEF(iemOp_sysret)
1986{
1987 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1989 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1990 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1991 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1992}
1993
1994
1995/** Opcode 0x0f 0x08. */
1996FNIEMOP_DEF(iemOp_invd)
1997{
1998 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1999 IEMOP_HLP_MIN_486();
2000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2001 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invd);
2002}
2003
2004
2005/** Opcode 0x0f 0x09. */
2006FNIEMOP_DEF(iemOp_wbinvd)
2007{
2008 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2009 IEMOP_HLP_MIN_486();
2010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2011 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wbinvd);
2012}
2013
2014
2015/** Opcode 0x0f 0x0b. */
2016FNIEMOP_DEF(iemOp_ud2)
2017{
2018 IEMOP_MNEMONIC(ud2, "ud2");
2019 IEMOP_RAISE_INVALID_OPCODE_RET();
2020}
2021
2022/** Opcode 0x0f 0x0d. */
2023FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2024{
2025 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2026 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2027 {
2028 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2029 IEMOP_RAISE_INVALID_OPCODE_RET();
2030 }
2031
2032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2033 if (IEM_IS_MODRM_REG_MODE(bRm))
2034 {
2035 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2036 IEMOP_RAISE_INVALID_OPCODE_RET();
2037 }
2038
2039 switch (IEM_GET_MODRM_REG_8(bRm))
2040 {
2041 case 2: /* Aliased to /0 for the time being. */
2042 case 4: /* Aliased to /0 for the time being. */
2043 case 5: /* Aliased to /0 for the time being. */
2044 case 6: /* Aliased to /0 for the time being. */
2045 case 7: /* Aliased to /0 for the time being. */
2046 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2047 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2048 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2050 }
2051
2052 IEM_MC_BEGIN(0, 1);
2053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2056 /* Currently a NOP. */
2057 NOREF(GCPtrEffSrc);
2058 IEM_MC_ADVANCE_RIP_AND_FINISH();
2059 IEM_MC_END();
2060}
2061
2062
2063/** Opcode 0x0f 0x0e. */
2064FNIEMOP_DEF(iemOp_femms)
2065{
2066 IEMOP_MNEMONIC(femms, "femms");
2067
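 /* AMD 3DNow! 'faster EMMS': like emms, this takes the FPU out of MMX mode. */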
2068 IEM_MC_BEGIN(0, 0);
2069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2070 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2071 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2072 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2073 IEM_MC_FPU_FROM_MMX_MODE();
2074 IEM_MC_ADVANCE_RIP_AND_FINISH();
2075 IEM_MC_END();
2076}
2077
2078
2079/** Opcode 0x0f 0x0f. */
2080FNIEMOP_DEF(iemOp_3Dnow)
2081{
2082 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2083 {
2084 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2085 IEMOP_RAISE_INVALID_OPCODE_RET();
2086 }
2087
2088#ifdef IEM_WITH_3DNOW
2089 /* This is pretty sparse, use switch instead of table. */
2090 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2091 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2092#else
2093 IEMOP_BITCH_ABOUT_STUB();
2094 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2095#endif
2096}
2097
2098
2099/**
2100 * @opcode 0x10
2101 * @oppfx none
2102 * @opcpuid sse
2103 * @opgroup og_sse_simdfp_datamove
2104 * @opxcpttype 4UA
2105 * @optest op1=1 op2=2 -> op1=2
2106 * @optest op1=0 op2=-22 -> op1=-22
2107 */
2108FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2109{
2110 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2112 if (IEM_IS_MODRM_REG_MODE(bRm))
2113 {
2114 /*
2115 * XMM128, XMM128.
2116 */
2117 IEM_MC_BEGIN(0, 0);
2118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2119 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2120 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2121 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2122 IEM_GET_MODRM_RM(pVCpu, bRm));
2123 IEM_MC_ADVANCE_RIP_AND_FINISH();
2124 IEM_MC_END();
2125 }
2126 else
2127 {
2128 /*
2129 * XMM128, [mem128].
2130 */
2131 IEM_MC_BEGIN(0, 2);
2132 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134
2135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2137 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2138 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2139
2140 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2141 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2142
2143 IEM_MC_ADVANCE_RIP_AND_FINISH();
2144 IEM_MC_END();
2145 }
2146
2147}
2148
2149
2150/**
2151 * @opcode 0x10
2152 * @oppfx 0x66
2153 * @opcpuid sse2
2154 * @opgroup og_sse2_pcksclr_datamove
2155 * @opxcpttype 4UA
2156 * @optest op1=1 op2=2 -> op1=2
2157 * @optest op1=0 op2=-42 -> op1=-42
2158 */
2159FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2160{
2161 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2163 if (IEM_IS_MODRM_REG_MODE(bRm))
2164 {
2165 /*
2166 * XMM128, XMM128.
2167 */
2168 IEM_MC_BEGIN(0, 0);
2169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2170 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2172 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2173 IEM_GET_MODRM_RM(pVCpu, bRm));
2174 IEM_MC_ADVANCE_RIP_AND_FINISH();
2175 IEM_MC_END();
2176 }
2177 else
2178 {
2179 /*
2180 * XMM128, [mem128].
2181 */
2182 IEM_MC_BEGIN(0, 2);
2183 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2185
2186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2188 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2189 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2190
2191 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2192 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2193
2194 IEM_MC_ADVANCE_RIP_AND_FINISH();
2195 IEM_MC_END();
2196 }
2197}
2198
2199
2200/**
2201 * @opcode 0x10
2202 * @oppfx 0xf3
2203 * @opcpuid sse
2204 * @opgroup og_sse_simdfp_datamove
2205 * @opxcpttype 5
2206 * @optest op1=1 op2=2 -> op1=2
2207 * @optest op1=0 op2=-22 -> op1=-22
2208 */
2209FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2210{
2211 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 if (IEM_IS_MODRM_REG_MODE(bRm))
2214 {
2215 /*
2216 * XMM32, XMM32.
2217 */
2218 IEM_MC_BEGIN(0, 1);
2219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2220 IEM_MC_LOCAL(uint32_t, uSrc);
2221
2222 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2223 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2224 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/);
2225 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2226
2227 IEM_MC_ADVANCE_RIP_AND_FINISH();
2228 IEM_MC_END();
2229 }
2230 else
2231 {
2232 /*
2233 * XMM128, [mem32].
2234 */
2235 IEM_MC_BEGIN(0, 2);
2236 IEM_MC_LOCAL(uint32_t, uSrc);
2237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2238
2239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2243
2244 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2245 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2246
2247 IEM_MC_ADVANCE_RIP_AND_FINISH();
2248 IEM_MC_END();
2249 }
2250}
2251
2252
2253/**
2254 * @opcode 0x10
2255 * @oppfx 0xf2
2256 * @opcpuid sse2
2257 * @opgroup og_sse2_pcksclr_datamove
2258 * @opxcpttype 5
2259 * @optest op1=1 op2=2 -> op1=2
2260 * @optest op1=0 op2=-42 -> op1=-42
2261 */
2262FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2263{
2264 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2266 if (IEM_IS_MODRM_REG_MODE(bRm))
2267 {
2268 /*
2269 * XMM64, XMM64.
2270 */
2271 IEM_MC_BEGIN(0, 1);
2272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2273 IEM_MC_LOCAL(uint64_t, uSrc);
2274
2275 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2276 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2277 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2278 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2279
2280 IEM_MC_ADVANCE_RIP_AND_FINISH();
2281 IEM_MC_END();
2282 }
2283 else
2284 {
2285 /*
2286 * XMM128, [mem64].
2287 */
2288 IEM_MC_BEGIN(0, 2);
2289 IEM_MC_LOCAL(uint64_t, uSrc);
2290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2291
2292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2294 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2295 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2296
2297 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2298 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2299
2300 IEM_MC_ADVANCE_RIP_AND_FINISH();
2301 IEM_MC_END();
2302 }
2303}
2304
2305
2306/**
2307 * @opcode 0x11
2308 * @oppfx none
2309 * @opcpuid sse
2310 * @opgroup og_sse_simdfp_datamove
2311 * @opxcpttype 4UA
2312 * @optest op1=1 op2=2 -> op1=2
2313 * @optest op1=0 op2=-42 -> op1=-42
2314 */
2315FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2316{
2317 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2319 if (IEM_IS_MODRM_REG_MODE(bRm))
2320 {
2321 /*
2322 * XMM128, XMM128.
2323 */
2324 IEM_MC_BEGIN(0, 0);
2325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2326 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2327 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2328 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2329 IEM_GET_MODRM_REG(pVCpu, bRm));
2330 IEM_MC_ADVANCE_RIP_AND_FINISH();
2331 IEM_MC_END();
2332 }
2333 else
2334 {
2335 /*
2336 * [mem128], XMM128.
2337 */
2338 IEM_MC_BEGIN(0, 2);
2339 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2341
2342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2344 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2345 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2346
2347 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2348 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2349
2350 IEM_MC_ADVANCE_RIP_AND_FINISH();
2351 IEM_MC_END();
2352 }
2353}
2354
2355
2356/**
2357 * @opcode 0x11
2358 * @oppfx 0x66
2359 * @opcpuid sse2
2360 * @opgroup og_sse2_pcksclr_datamove
2361 * @opxcpttype 4UA
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-42 -> op1=-42
2364 */
2365FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2366{
2367 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2369 if (IEM_IS_MODRM_REG_MODE(bRm))
2370 {
2371 /*
2372 * XMM128, XMM128.
2373 */
2374 IEM_MC_BEGIN(0, 0);
2375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2376 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2377 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2378 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2379 IEM_GET_MODRM_REG(pVCpu, bRm));
2380 IEM_MC_ADVANCE_RIP_AND_FINISH();
2381 IEM_MC_END();
2382 }
2383 else
2384 {
2385 /*
2386 * [mem128], XMM128.
2387 */
2388 IEM_MC_BEGIN(0, 2);
2389 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2391
2392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2394 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2396
2397 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2398 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2399
2400 IEM_MC_ADVANCE_RIP_AND_FINISH();
2401 IEM_MC_END();
2402 }
2403}
2404
2405
2406/**
2407 * @opcode 0x11
2408 * @oppfx 0xf3
2409 * @opcpuid sse
2410 * @opgroup og_sse_simdfp_datamove
2411 * @opxcpttype 5
2412 * @optest op1=1 op2=2 -> op1=2
2413 * @optest op1=0 op2=-22 -> op1=-22
2414 */
2415FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2416{
2417 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2419 if (IEM_IS_MODRM_REG_MODE(bRm))
2420 {
2421 /*
2422 * XMM32, XMM32.
2423 */
2424 IEM_MC_BEGIN(0, 1);
2425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427
2428 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2429 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2430 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2431 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2432
2433 IEM_MC_ADVANCE_RIP_AND_FINISH();
2434 IEM_MC_END();
2435 }
2436 else
2437 {
2438 /*
2439 * [mem32], XMM32.
2440 */
2441 IEM_MC_BEGIN(0, 2);
2442 IEM_MC_LOCAL(uint32_t, uSrc);
2443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2444
2445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2447 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2449
2450 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2451 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2452
2453 IEM_MC_ADVANCE_RIP_AND_FINISH();
2454 IEM_MC_END();
2455 }
2456}
2457
2458
2459/**
2460 * @opcode 0x11
2461 * @oppfx 0xf2
2462 * @opcpuid sse2
2463 * @opgroup og_sse2_pcksclr_datamove
2464 * @opxcpttype 5
2465 * @optest op1=1 op2=2 -> op1=2
2466 * @optest op1=0 op2=-42 -> op1=-42
2467 */
2468FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2469{
2470 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2472 if (IEM_IS_MODRM_REG_MODE(bRm))
2473 {
2474 /*
2475 * XMM64, XMM64.
2476 */
2477 IEM_MC_BEGIN(0, 1);
2478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2479 IEM_MC_LOCAL(uint64_t, uSrc);
2480
2481 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2483 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2484 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2485
2486 IEM_MC_ADVANCE_RIP_AND_FINISH();
2487 IEM_MC_END();
2488 }
2489 else
2490 {
2491 /*
2492 * [mem64], XMM64.
2493 */
2494 IEM_MC_BEGIN(0, 2);
2495 IEM_MC_LOCAL(uint64_t, uSrc);
2496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2497
2498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2500 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2502
2503 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2504 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2505
2506 IEM_MC_ADVANCE_RIP_AND_FINISH();
2507 IEM_MC_END();
2508 }
2509}
2510
2511
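/** Opcode 0x0f 0x12. */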
2512FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2513{
2514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2515 if (IEM_IS_MODRM_REG_MODE(bRm))
2516 {
2517 /**
2518 * @opcode 0x12
2519 * @opcodesub 11 mr/reg
2520 * @oppfx none
2521 * @opcpuid sse
2522 * @opgroup og_sse_simdfp_datamove
2523 * @opxcpttype 5
2524 * @optest op1=1 op2=2 -> op1=2
2525 * @optest op1=0 op2=-42 -> op1=-42
2526 */
2527 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2528
2529 IEM_MC_BEGIN(0, 1);
2530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2531 IEM_MC_LOCAL(uint64_t, uSrc);
2532
2533 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
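 /* movhlps: the high qword of the source is copied to the low qword of the destination. */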
2535 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2536 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2537
2538 IEM_MC_ADVANCE_RIP_AND_FINISH();
2539 IEM_MC_END();
2540 }
2541 else
2542 {
2543 /**
2544 * @opdone
2545 * @opcode 0x12
2546 * @opcodesub !11 mr/reg
2547 * @oppfx none
2548 * @opcpuid sse
2549 * @opgroup og_sse_simdfp_datamove
2550 * @opxcpttype 5
2551 * @optest op1=1 op2=2 -> op1=2
2552 * @optest op1=0 op2=-42 -> op1=-42
2553 * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
2554 */
2555 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2556
2557 IEM_MC_BEGIN(0, 2);
2558 IEM_MC_LOCAL(uint64_t, uSrc);
2559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2560
2561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2563 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2564 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2565
2566 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2567 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2568
2569 IEM_MC_ADVANCE_RIP_AND_FINISH();
2570 IEM_MC_END();
2571 }
2572}
2573
2574
2575/**
2576 * @opcode 0x12
2577 * @opcodesub !11 mr/reg
2578 * @oppfx 0x66
2579 * @opcpuid sse2
2580 * @opgroup og_sse2_pcksclr_datamove
2581 * @opxcpttype 5
2582 * @optest op1=1 op2=2 -> op1=2
2583 * @optest op1=0 op2=-42 -> op1=-42
2584 */
2585FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2586{
2587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2588 if (IEM_IS_MODRM_MEM_MODE(bRm))
2589 {
2590 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2591
2592 IEM_MC_BEGIN(0, 2);
2593 IEM_MC_LOCAL(uint64_t, uSrc);
2594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2595
2596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2598 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2599 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2600
2601 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2602 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2603
2604 IEM_MC_ADVANCE_RIP_AND_FINISH();
2605 IEM_MC_END();
2606 }
2607
2608 /**
2609 * @opdone
2610 * @opmnemonic ud660f12m3
2611 * @opcode 0x12
2612 * @opcodesub 11 mr/reg
2613 * @oppfx 0x66
2614 * @opunused immediate
2615 * @opcpuid sse
2616 * @optest ->
2617 */
2618 else
2619 IEMOP_RAISE_INVALID_OPCODE_RET();
2620}
2621
2622
2623/**
2624 * @opcode 0x12
2625 * @oppfx 0xf3
2626 * @opcpuid sse3
2627 * @opgroup og_sse3_pcksclr_datamove
2628 * @opxcpttype 4
2629 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2630 * op1=0x00000002000000020000000100000001
2631 */
2632FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2633{
2634 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2636 if (IEM_IS_MODRM_REG_MODE(bRm))
2637 {
2638 /*
2639 * XMM, XMM.
2640 */
2641 IEM_MC_BEGIN(0, 1);
2642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2643 IEM_MC_LOCAL(RTUINT128U, uSrc);
2644
2645 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2646 IEM_MC_PREPARE_SSE_USAGE();
2647
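 /* Duplicate the even dwords of the source: dst[0]=dst[1]=src[0], dst[2]=dst[3]=src[2]. */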
2648 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2649 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2650 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2651 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2652 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2653
2654 IEM_MC_ADVANCE_RIP_AND_FINISH();
2655 IEM_MC_END();
2656 }
2657 else
2658 {
2659 /*
2660 * XMM, [mem128].
2661 */
2662 IEM_MC_BEGIN(0, 2);
2663 IEM_MC_LOCAL(RTUINT128U, uSrc);
2664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2665
2666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2668 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2669 IEM_MC_PREPARE_SSE_USAGE();
2670
2671 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2672 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2673 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2674 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2675 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2676
2677 IEM_MC_ADVANCE_RIP_AND_FINISH();
2678 IEM_MC_END();
2679 }
2680}
2681
2682
2683/**
2684 * @opcode 0x12
2685 * @oppfx 0xf2
2686 * @opcpuid sse3
2687 * @opgroup og_sse3_pcksclr_datamove
2688 * @opxcpttype 5
2689 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2690 * op1=0x22222222111111112222222211111111
2691 */
2692FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2693{
2694 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2696 if (IEM_IS_MODRM_REG_MODE(bRm))
2697 {
2698 /*
2699 * XMM128, XMM64.
2700 */
2701 IEM_MC_BEGIN(1, 0);
2702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2703 IEM_MC_ARG(uint64_t, uSrc, 0);
2704
2705 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2706 IEM_MC_PREPARE_SSE_USAGE();
2707
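 /* Duplicate the low qword of the source into both qwords of the destination. */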
2708 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2709 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2710 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2711
2712 IEM_MC_ADVANCE_RIP_AND_FINISH();
2713 IEM_MC_END();
2714 }
2715 else
2716 {
2717 /*
2718 * XMM128, [mem64].
2719 */
2720 IEM_MC_BEGIN(1, 1);
2721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2722 IEM_MC_ARG(uint64_t, uSrc, 0);
2723
2724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2726 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2727 IEM_MC_PREPARE_SSE_USAGE();
2728
2729 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2730 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2731 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2732
2733 IEM_MC_ADVANCE_RIP_AND_FINISH();
2734 IEM_MC_END();
2735 }
2736}
2737
2738
2739/**
2740 * @opcode 0x13
2741 * @opcodesub !11 mr/reg
2742 * @oppfx none
2743 * @opcpuid sse
2744 * @opgroup og_sse_simdfp_datamove
2745 * @opxcpttype 5
2746 * @optest op1=1 op2=2 -> op1=2
2747 * @optest op1=0 op2=-42 -> op1=-42
2748 */
2749FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2750{
2751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2752 if (IEM_IS_MODRM_MEM_MODE(bRm))
2753 {
2754 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2755
2756 IEM_MC_BEGIN(0, 2);
2757 IEM_MC_LOCAL(uint64_t, uSrc);
2758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2759
2760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2762 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2763 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2764
2765 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2766 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2767
2768 IEM_MC_ADVANCE_RIP_AND_FINISH();
2769 IEM_MC_END();
2770 }
2771
2772 /**
2773 * @opdone
2774 * @opmnemonic ud0f13m3
2775 * @opcode 0x13
2776 * @opcodesub 11 mr/reg
2777 * @oppfx none
2778 * @opunused immediate
2779 * @opcpuid sse
2780 * @optest ->
2781 */
2782 else
2783 IEMOP_RAISE_INVALID_OPCODE_RET();
2784}
2785
2786
2787/**
2788 * @opcode 0x13
2789 * @opcodesub !11 mr/reg
2790 * @oppfx 0x66
2791 * @opcpuid sse2
2792 * @opgroup og_sse2_pcksclr_datamove
2793 * @opxcpttype 5
2794 * @optest op1=1 op2=2 -> op1=2
2795 * @optest op1=0 op2=-42 -> op1=-42
2796 */
2797FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2798{
2799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2800 if (IEM_IS_MODRM_MEM_MODE(bRm))
2801 {
2802 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2803 IEM_MC_BEGIN(0, 2);
2804 IEM_MC_LOCAL(uint64_t, uSrc);
2805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2806
2807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2809 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2810 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2811
2812 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2813 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2814
2815 IEM_MC_ADVANCE_RIP_AND_FINISH();
2816 IEM_MC_END();
2817 }
2818
2819 /**
2820 * @opdone
2821 * @opmnemonic ud660f13m3
2822 * @opcode 0x13
2823 * @opcodesub 11 mr/reg
2824 * @oppfx 0x66
2825 * @opunused immediate
2826 * @opcpuid sse
2827 * @optest ->
2828 */
2829 else
2830 IEMOP_RAISE_INVALID_OPCODE_RET();
2831}
2832
2833
2834/**
2835 * @opmnemonic udf30f13
2836 * @opcode 0x13
2837 * @oppfx 0xf3
2838 * @opunused intel-modrm
2839 * @opcpuid sse
2840 * @optest ->
2841 * @opdone
2842 */
2843
2844/**
2845 * @opmnemonic udf20f13
2846 * @opcode 0x13
2847 * @oppfx 0xf2
2848 * @opunused intel-modrm
2849 * @opcpuid sse
2850 * @optest ->
2851 * @opdone
2852 */
2853
2854/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2855FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2856{
2857 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2858 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2859}
2860
2861
2862/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2863FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2864{
2865 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2866 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2867}
2868
2869
2870/**
2871 * @opdone
2872 * @opmnemonic udf30f14
2873 * @opcode 0x14
2874 * @oppfx 0xf3
2875 * @opunused intel-modrm
2876 * @opcpuid sse
2877 * @optest ->
2878 * @opdone
2879 */
2880
2881/**
2882 * @opmnemonic udf20f14
2883 * @opcode 0x14
2884 * @oppfx 0xf2
2885 * @opunused intel-modrm
2886 * @opcpuid sse
2887 * @optest ->
2888 * @opdone
2889 */
2890
2891/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2892FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2893{
2894 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2895 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2896}
2897
2898
2899/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2900FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2901{
2902 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2903 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2904}
2905
2906
2907/* Opcode 0xf3 0x0f 0x15 - invalid */
2908/* Opcode 0xf2 0x0f 0x15 - invalid */
2909
2910/**
2911 * @opdone
2912 * @opmnemonic udf30f15
2913 * @opcode 0x15
2914 * @oppfx 0xf3
2915 * @opunused intel-modrm
2916 * @opcpuid sse
2917 * @optest ->
2918 * @opdone
2919 */
2920
2921/**
2922 * @opmnemonic udf20f15
2923 * @opcode 0x15
2924 * @oppfx 0xf2
2925 * @opunused intel-modrm
2926 * @opcpuid sse
2927 * @optest ->
2928 * @opdone
2929 */
2930
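/** Opcode 0x0f 0x16. */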
2931FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2932{
2933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2934 if (IEM_IS_MODRM_REG_MODE(bRm))
2935 {
2936 /**
2937 * @opcode 0x16
2938 * @opcodesub 11 mr/reg
2939 * @oppfx none
2940 * @opcpuid sse
2941 * @opgroup og_sse_simdfp_datamove
2942 * @opxcpttype 5
2943 * @optest op1=1 op2=2 -> op1=2
2944 * @optest op1=0 op2=-42 -> op1=-42
2945 */
2946 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2947
2948 IEM_MC_BEGIN(0, 1);
2949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2950 IEM_MC_LOCAL(uint64_t, uSrc);
2951
2952 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2953 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
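 /* movlhps: the low qword of the source is copied to the high qword of the destination. */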
2954 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2955 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2956
2957 IEM_MC_ADVANCE_RIP_AND_FINISH();
2958 IEM_MC_END();
2959 }
2960 else
2961 {
2962 /**
2963 * @opdone
2964 * @opcode 0x16
2965 * @opcodesub !11 mr/reg
2966 * @oppfx none
2967 * @opcpuid sse
2968 * @opgroup og_sse_simdfp_datamove
2969 * @opxcpttype 5
2970 * @optest op1=1 op2=2 -> op1=2
2971 * @optest op1=0 op2=-42 -> op1=-42
2972 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2973 */
2974 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2975
2976 IEM_MC_BEGIN(0, 2);
2977 IEM_MC_LOCAL(uint64_t, uSrc);
2978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2979
2980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2982 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2983 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2984
2985 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2986 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2987
2988 IEM_MC_ADVANCE_RIP_AND_FINISH();
2989 IEM_MC_END();
2990 }
2991}
2992
2993
2994/**
2995 * @opcode 0x16
2996 * @opcodesub !11 mr/reg
2997 * @oppfx 0x66
2998 * @opcpuid sse2
2999 * @opgroup og_sse2_pcksclr_datamove
3000 * @opxcpttype 5
3001 * @optest op1=1 op2=2 -> op1=2
3002 * @optest op1=0 op2=-42 -> op1=-42
3003 */
3004FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3005{
3006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3007 if (IEM_IS_MODRM_MEM_MODE(bRm))
3008 {
3009 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3010 IEM_MC_BEGIN(0, 2);
3011 IEM_MC_LOCAL(uint64_t, uSrc);
3012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3013
3014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3016 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3017 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3018
3019 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3020 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3021
3022 IEM_MC_ADVANCE_RIP_AND_FINISH();
3023 IEM_MC_END();
3024 }
3025
3026 /**
3027 * @opdone
3028 * @opmnemonic ud660f16m3
3029 * @opcode 0x16
3030 * @opcodesub 11 mr/reg
3031 * @oppfx 0x66
3032 * @opunused immediate
3033 * @opcpuid sse
3034 * @optest ->
3035 */
3036 else
3037 IEMOP_RAISE_INVALID_OPCODE_RET();
3038}
3039
3040
3041/**
3042 * @opcode 0x16
3043 * @oppfx 0xf3
3044 * @opcpuid sse3
3045 * @opgroup og_sse3_pcksclr_datamove
3046 * @opxcpttype 4
3047 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3048 * op1=0x00000002000000020000000100000001
3049 */
3050FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3051{
3052 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3054 if (IEM_IS_MODRM_REG_MODE(bRm))
3055 {
3056 /*
3057 * XMM128, XMM128.
3058 */
3059 IEM_MC_BEGIN(0, 1);
3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3061 IEM_MC_LOCAL(RTUINT128U, uSrc);
3062
3063 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3064 IEM_MC_PREPARE_SSE_USAGE();
3065
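 /* Duplicate the odd dwords of the source: dst[0]=dst[1]=src[1], dst[2]=dst[3]=src[3]. */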
3066 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3067 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3068 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3069 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3070 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3071
3072 IEM_MC_ADVANCE_RIP_AND_FINISH();
3073 IEM_MC_END();
3074 }
3075 else
3076 {
3077 /*
3078 * XMM128, [mem128].
3079 */
3080 IEM_MC_BEGIN(0, 2);
3081 IEM_MC_LOCAL(RTUINT128U, uSrc);
3082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3083
3084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3086 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3087 IEM_MC_PREPARE_SSE_USAGE();
3088
3089 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3090 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3091 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3092 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3093 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3094
3095 IEM_MC_ADVANCE_RIP_AND_FINISH();
3096 IEM_MC_END();
3097 }
3098}
3099
3100/**
3101 * @opdone
3102 * @opmnemonic udf30f16
3103 * @opcode 0x16
3104 * @oppfx 0xf2
3105 * @opunused intel-modrm
3106 * @opcpuid sse
3107 * @optest ->
3108 * @opdone
3109 */
3110
3111
3112/**
3113 * @opcode 0x17
3114 * @opcodesub !11 mr/reg
3115 * @oppfx none
3116 * @opcpuid sse
3117 * @opgroup og_sse_simdfp_datamove
3118 * @opxcpttype 5
3119 * @optest op1=1 op2=2 -> op1=2
3120 * @optest op1=0 op2=-42 -> op1=-42
3121 */
3122FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3123{
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if (IEM_IS_MODRM_MEM_MODE(bRm))
3126 {
3127 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3128
3129 IEM_MC_BEGIN(0, 2);
3130 IEM_MC_LOCAL(uint64_t, uSrc);
3131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3132
3133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3135 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3136 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3137
3138 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3139 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3140
3141 IEM_MC_ADVANCE_RIP_AND_FINISH();
3142 IEM_MC_END();
3143 }
3144
3145 /**
3146 * @opdone
3147 * @opmnemonic ud0f17m3
3148 * @opcode 0x17
3149 * @opcodesub 11 mr/reg
3150 * @oppfx none
3151 * @opunused immediate
3152 * @opcpuid sse
3153 * @optest ->
3154 */
3155 else
3156 IEMOP_RAISE_INVALID_OPCODE_RET();
3157}
3158
3159
3160/**
3161 * @opcode 0x17
3162 * @opcodesub !11 mr/reg
3163 * @oppfx 0x66
3164 * @opcpuid sse2
3165 * @opgroup og_sse2_pcksclr_datamove
3166 * @opxcpttype 5
3167 * @optest op1=1 op2=2 -> op1=2
3168 * @optest op1=0 op2=-42 -> op1=-42
3169 */
3170FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3171{
3172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3173 if (IEM_IS_MODRM_MEM_MODE(bRm))
3174 {
3175 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3176
3177 IEM_MC_BEGIN(0, 2);
3178 IEM_MC_LOCAL(uint64_t, uSrc);
3179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3180
3181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3183 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3185
3186 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3187 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3188
3189 IEM_MC_ADVANCE_RIP_AND_FINISH();
3190 IEM_MC_END();
3191 }
3192
3193 /**
3194 * @opdone
3195 * @opmnemonic ud660f17m3
3196 * @opcode 0x17
3197 * @opcodesub 11 mr/reg
3198 * @oppfx 0x66
3199 * @opunused immediate
3200 * @opcpuid sse
3201 * @optest ->
3202 */
3203 else
3204 IEMOP_RAISE_INVALID_OPCODE_RET();
3205}
3206
3207
3208/**
3209 * @opdone
3210 * @opmnemonic udf30f17
3211 * @opcode 0x17
3212 * @oppfx 0xf3
3213 * @opunused intel-modrm
3214 * @opcpuid sse
3215 * @optest ->
3216 * @opdone
3217 */
3218
3219/**
3220 * @opmnemonic udf20f17
3221 * @opcode 0x17
3222 * @oppfx 0xf2
3223 * @opunused intel-modrm
3224 * @opcpuid sse
3225 * @optest ->
3226 * @opdone
3227 */
3228
3229
3230/** Opcode 0x0f 0x18. */
3231FNIEMOP_DEF(iemOp_prefetch_Grp16)
3232{
3233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3234 if (IEM_IS_MODRM_MEM_MODE(bRm))
3235 {
3236 switch (IEM_GET_MODRM_REG_8(bRm))
3237 {
3238 case 4: /* Aliased to /0 for the time being according to AMD. */
3239 case 5: /* Aliased to /0 for the time being according to AMD. */
3240 case 6: /* Aliased to /0 for the time being according to AMD. */
3241 case 7: /* Aliased to /0 for the time being according to AMD. */
3242 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3243 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3244 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3245 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3246 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3247 }
3248
3249 IEM_MC_BEGIN(0, 1);
3250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3253 /* Currently a NOP. */
3254 NOREF(GCPtrEffSrc);
3255 IEM_MC_ADVANCE_RIP_AND_FINISH();
3256 IEM_MC_END();
3257 }
3258 else
3259 IEMOP_RAISE_INVALID_OPCODE_RET();
3260}
3261
3262
3263/** Opcode 0x0f 0x19..0x1f. */
3264FNIEMOP_DEF(iemOp_nop_Ev)
3265{
3266 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3268 if (IEM_IS_MODRM_REG_MODE(bRm))
3269 {
3270 IEM_MC_BEGIN(0, 0);
3271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3272 IEM_MC_ADVANCE_RIP_AND_FINISH();
3273 IEM_MC_END();
3274 }
3275 else
3276 {
3277 IEM_MC_BEGIN(0, 1);
3278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3281 /* Currently a NOP. */
3282 NOREF(GCPtrEffSrc);
3283 IEM_MC_ADVANCE_RIP_AND_FINISH();
3284 IEM_MC_END();
3285 }
3286}
3287
3288
3289/** Opcode 0x0f 0x20. */
3290FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3291{
3292 /* mod is ignored, as are operand size overrides. */
3293 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3294 IEMOP_HLP_MIN_386();
3295 if (IEM_IS_64BIT_CODE(pVCpu))
3296 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3297 else
3298 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3299
3300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3301 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3302 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3303 {
3304 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3305 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3306 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3307 iCrReg |= 8;
3308 }
3309 switch (iCrReg)
3310 {
3311 case 0: case 2: case 3: case 4: case 8:
3312 break;
3313 default:
3314 IEMOP_RAISE_INVALID_OPCODE_RET();
3315 }
3316 IEMOP_HLP_DONE_DECODING();
3317
3318 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3319}
3320
3321
3322/** Opcode 0x0f 0x21. */
3323FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3324{
3325 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3326 IEMOP_HLP_MIN_386();
3327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3329 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3330 IEMOP_RAISE_INVALID_OPCODE_RET();
3331 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3332}
3333
3334
3335/** Opcode 0x0f 0x22. */
3336FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3337{
3338 /* mod is ignored, as are operand size overrides. */
3339 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3340 IEMOP_HLP_MIN_386();
3341 if (IEM_IS_64BIT_CODE(pVCpu))
3342 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3343 else
3344 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3345
3346 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3347 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3348 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3349 {
3350 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3351 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3352 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3353 iCrReg |= 8;
3354 }
3355 switch (iCrReg)
3356 {
3357 case 0: case 2: case 3: case 4: case 8:
3358 break;
3359 default:
3360 IEMOP_RAISE_INVALID_OPCODE_RET();
3361 }
3362 IEMOP_HLP_DONE_DECODING();
3363
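 /* CR2, CR3 and CR8 (the registers matched by the 2|8 bit test) cannot change the
 execution mode, so only CR0 and CR4 moves carry IEM_CIMPL_F_MODE. */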
3364 if (iCrReg & (2 | 8))
3365 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3366 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3367 else
3368 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
3369 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3370}
3371
3372
3373/** Opcode 0x0f 0x23. */
3374FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3375{
3376 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3377 IEMOP_HLP_MIN_386();
3378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3380 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3381 IEMOP_RAISE_INVALID_OPCODE_RET();
3382 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3383}
3384
3385
3386/** Opcode 0x0f 0x24. */
3387FNIEMOP_DEF(iemOp_mov_Rd_Td)
3388{
3389 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3390 IEMOP_HLP_MIN_386();
3391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3393 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3394 IEMOP_RAISE_INVALID_OPCODE_RET();
3395 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3396}
3397
3398
3399/** Opcode 0x0f 0x26. */
3400FNIEMOP_DEF(iemOp_mov_Td_Rd)
3401{
3402 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3403 IEMOP_HLP_MIN_386();
3404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3406 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3407 IEMOP_RAISE_INVALID_OPCODE_RET();
3408 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3409}
3410
3411
3412/**
3413 * @opcode 0x28
3414 * @oppfx none
3415 * @opcpuid sse
3416 * @opgroup og_sse_simdfp_datamove
3417 * @opxcpttype 1
3418 * @optest op1=1 op2=2 -> op1=2
3419 * @optest op1=0 op2=-42 -> op1=-42
3420 */
3421FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3422{
3423 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3425 if (IEM_IS_MODRM_REG_MODE(bRm))
3426 {
3427 /*
3428 * Register, register.
3429 */
3430 IEM_MC_BEGIN(0, 0);
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3432 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3433 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3434 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3435 IEM_GET_MODRM_RM(pVCpu, bRm));
3436 IEM_MC_ADVANCE_RIP_AND_FINISH();
3437 IEM_MC_END();
3438 }
3439 else
3440 {
3441 /*
3442 * Register, memory.
3443 */
3444 IEM_MC_BEGIN(0, 2);
3445 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3447
3448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3450 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3451 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3452
3453 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3454 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3455
3456 IEM_MC_ADVANCE_RIP_AND_FINISH();
3457 IEM_MC_END();
3458 }
3459}
3460
3461/**
3462 * @opcode 0x28
3463 * @oppfx 66
3464 * @opcpuid sse2
3465 * @opgroup og_sse2_pcksclr_datamove
3466 * @opxcpttype 1
3467 * @optest op1=1 op2=2 -> op1=2
3468 * @optest op1=0 op2=-42 -> op1=-42
3469 */
3470FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3471{
3472 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3474 if (IEM_IS_MODRM_REG_MODE(bRm))
3475 {
3476 /*
3477 * Register, register.
3478 */
3479 IEM_MC_BEGIN(0, 0);
3480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3481 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3483 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3484 IEM_GET_MODRM_RM(pVCpu, bRm));
3485 IEM_MC_ADVANCE_RIP_AND_FINISH();
3486 IEM_MC_END();
3487 }
3488 else
3489 {
3490 /*
3491 * Register, memory.
3492 */
3493 IEM_MC_BEGIN(0, 2);
3494 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3496
3497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3499 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3500 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3501
3502 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3503 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3504
3505 IEM_MC_ADVANCE_RIP_AND_FINISH();
3506 IEM_MC_END();
3507 }
3508}
3509
3510/* Opcode 0xf3 0x0f 0x28 - invalid */
3511/* Opcode 0xf2 0x0f 0x28 - invalid */
3512
3513/**
3514 * @opcode 0x29
3515 * @oppfx none
3516 * @opcpuid sse
3517 * @opgroup og_sse_simdfp_datamove
3518 * @opxcpttype 1
3519 * @optest op1=1 op2=2 -> op1=2
3520 * @optest op1=0 op2=-42 -> op1=-42
3521 */
3522FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3523{
3524 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3526 if (IEM_IS_MODRM_REG_MODE(bRm))
3527 {
3528 /*
3529 * Register, register.
3530 */
3531 IEM_MC_BEGIN(0, 0);
3532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3533 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3535 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3536 IEM_GET_MODRM_REG(pVCpu, bRm));
3537 IEM_MC_ADVANCE_RIP_AND_FINISH();
3538 IEM_MC_END();
3539 }
3540 else
3541 {
3542 /*
3543 * Memory, register.
3544 */
3545 IEM_MC_BEGIN(0, 2);
3546 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3548
3549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3551 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3552 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3553
3554 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3555 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3556
3557 IEM_MC_ADVANCE_RIP_AND_FINISH();
3558 IEM_MC_END();
3559 }
3560}
3561
3562/**
3563 * @opcode 0x29
3564 * @oppfx 66
3565 * @opcpuid sse2
3566 * @opgroup og_sse2_pcksclr_datamove
3567 * @opxcpttype 1
3568 * @optest op1=1 op2=2 -> op1=2
3569 * @optest op1=0 op2=-42 -> op1=-42
3570 */
3571FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3572{
3573 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3575 if (IEM_IS_MODRM_REG_MODE(bRm))
3576 {
3577 /*
3578 * Register, register.
3579 */
3580 IEM_MC_BEGIN(0, 0);
3581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3582 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3583 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3584 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3585 IEM_GET_MODRM_REG(pVCpu, bRm));
3586 IEM_MC_ADVANCE_RIP_AND_FINISH();
3587 IEM_MC_END();
3588 }
3589 else
3590 {
3591 /*
3592 * Memory, register.
3593 */
3594 IEM_MC_BEGIN(0, 2);
3595 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3597
3598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3600 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3601 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3602
3603 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3604 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3605
3606 IEM_MC_ADVANCE_RIP_AND_FINISH();
3607 IEM_MC_END();
3608 }
3609}
3610
3611/* Opcode 0xf3 0x0f 0x29 - invalid */
3612/* Opcode 0xf2 0x0f 0x29 - invalid */
3613
3614
3615/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3616FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3617{
3618 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3620 if (IEM_IS_MODRM_REG_MODE(bRm))
3621 {
3622 /*
3623 * XMM, MMX
3624 */
3625 IEM_MC_BEGIN(3, 1);
3626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3627 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3628 IEM_MC_LOCAL(X86XMMREG, Dst);
3629 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3630 IEM_MC_ARG(uint64_t, u64Src, 2);
3631 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3632 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3633 IEM_MC_PREPARE_FPU_USAGE();
3634 IEM_MC_FPU_TO_MMX_MODE();
3635
3636 IEM_MC_REF_MXCSR(pfMxcsr);
3637 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3638 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3639
3640 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
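    /* Only commit the result when no unmasked SIMD FP exception is pending;
       otherwise raise #XF (or #UD if CR4.OSXMMEXCPT is clear) and leave the
       destination unmodified. */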
3641 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3642 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3643 } IEM_MC_ELSE() {
3644 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3645 } IEM_MC_ENDIF();
3646
3647 IEM_MC_ADVANCE_RIP_AND_FINISH();
3648 IEM_MC_END();
3649 }
3650 else
3651 {
3652 /*
3653 * XMM, [mem64]
3654 */
3655 IEM_MC_BEGIN(3, 2);
3656 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3657 IEM_MC_LOCAL(X86XMMREG, Dst);
3658 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3659 IEM_MC_ARG(uint64_t, u64Src, 2);
3660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3661
3662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3665 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3666 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3667
3668 IEM_MC_PREPARE_FPU_USAGE();
3669 IEM_MC_FPU_TO_MMX_MODE();
3670 IEM_MC_REF_MXCSR(pfMxcsr);
     IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3671
3672 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3673 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3674 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3675 } IEM_MC_ELSE() {
3676 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3677 } IEM_MC_ENDIF();
3678
3679 IEM_MC_ADVANCE_RIP_AND_FINISH();
3680 IEM_MC_END();
3681 }
3682}
3683
3684
3685/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3686FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3687{
3688 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
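    /* Widens the two packed int32 values to two doubles, filling the entire
       XMM destination.  Only the register form reads an MMX register and
       thus transitions to MMX mode; the memory form below does not. */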
3689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3690 if (IEM_IS_MODRM_REG_MODE(bRm))
3691 {
3692 /*
3693 * XMM, MMX
3694 */
3695 IEM_MC_BEGIN(3, 1);
3696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3697 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3698 IEM_MC_LOCAL(X86XMMREG, Dst);
3699 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3700 IEM_MC_ARG(uint64_t, u64Src, 2);
3701 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3702 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3703 IEM_MC_PREPARE_FPU_USAGE();
3704 IEM_MC_FPU_TO_MMX_MODE();
3705
3706 IEM_MC_REF_MXCSR(pfMxcsr);
3707 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3708
3709 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3710 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3711 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3712 } IEM_MC_ELSE() {
3713 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3714 } IEM_MC_ENDIF();
3715
3716 IEM_MC_ADVANCE_RIP_AND_FINISH();
3717 IEM_MC_END();
3718 }
3719 else
3720 {
3721 /*
3722 * XMM, [mem64]
3723 */
3724 IEM_MC_BEGIN(3, 3);
3725 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3726 IEM_MC_LOCAL(X86XMMREG, Dst);
3727 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3728 IEM_MC_ARG(uint64_t, u64Src, 2);
3729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3730
3731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3733 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3734 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3735 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3736
3737 /* Doesn't cause a transition to MMX mode. */
3738 IEM_MC_PREPARE_SSE_USAGE();
3739 IEM_MC_REF_MXCSR(pfMxcsr);
3740
3741 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3742 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3743 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3744 } IEM_MC_ELSE() {
3745 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3746 } IEM_MC_ENDIF();
3747
3748 IEM_MC_ADVANCE_RIP_AND_FINISH();
3749 IEM_MC_END();
3750 }
3751}
3752
3753
3754/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3755FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3756{
3757 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3758
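    /* REX.W selects the 64-bit integer source form; without it the source is
       a 32-bit GPR or memory dword.  Only the low dword of the XMM
       destination is written. */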
3759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3760 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3761 {
3762 if (IEM_IS_MODRM_REG_MODE(bRm))
3763 {
3764 /* XMM, greg64 */
3765 IEM_MC_BEGIN(3, 2);
3766 IEM_MC_LOCAL(uint32_t, fMxcsr);
3767 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3768 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3769 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3770 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3771
3772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3773 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3774 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
3775
3776 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3777 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3778 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3779 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3780 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3781 } IEM_MC_ELSE() {
3782 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3783 } IEM_MC_ENDIF();
3784
3785 IEM_MC_ADVANCE_RIP_AND_FINISH();
3786 IEM_MC_END();
3787 }
3788 else
3789 {
3790 /* XMM, [mem64] */
3791 IEM_MC_BEGIN(3, 4);
3792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3793 IEM_MC_LOCAL(uint32_t, fMxcsr);
3794 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3795 IEM_MC_LOCAL(int64_t, i64Src);
3796 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3797 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3798 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3799
3800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3802 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3803 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
3804
3805 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3806 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3807 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3808 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3809 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3810 } IEM_MC_ELSE() {
3811 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3812 } IEM_MC_ENDIF();
3813
3814 IEM_MC_ADVANCE_RIP_AND_FINISH();
3815 IEM_MC_END();
3816 }
3817 }
3818 else
3819 {
3820 if (IEM_IS_MODRM_REG_MODE(bRm))
3821 {
3822 /* XMM, greg32 */
3823 IEM_MC_BEGIN(3, 2);
3824 IEM_MC_LOCAL(uint32_t, fMxcsr);
3825 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3826 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3827 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3828 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3829
3830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3831 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3832 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
3833
3834 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3835 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3836 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3837 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3838 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3839 } IEM_MC_ELSE() {
3840 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3841 } IEM_MC_ENDIF();
3842
3843 IEM_MC_ADVANCE_RIP_AND_FINISH();
3844 IEM_MC_END();
3845 }
3846 else
3847 {
3848 /* XMM, [mem32] */
3849 IEM_MC_BEGIN(3, 4);
3850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3851 IEM_MC_LOCAL(uint32_t, fMxcsr);
3852 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3853 IEM_MC_LOCAL(int32_t, i32Src);
3854 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3855 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3856 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3857
3858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3860 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3861 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
3862
3863 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3864 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3865 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3866 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3867 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3868 } IEM_MC_ELSE() {
3869 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3870 } IEM_MC_ENDIF();
3871
3872 IEM_MC_ADVANCE_RIP_AND_FINISH();
3873 IEM_MC_END();
3874 }
3875 }
3876}
3877
3878
3879/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3880FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3881{
3882 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3883
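    /* Same structure as cvtsi2ss: REX.W selects the 64-bit integer source.
       Converting an int32 to double is always exact, so only the int64 form
       can raise a precision exception. */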
3884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3885 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3886 {
3887 if (IEM_IS_MODRM_REG_MODE(bRm))
3888 {
3889 /* XMM, greg64 */
3890 IEM_MC_BEGIN(3, 2);
3891 IEM_MC_LOCAL(uint32_t, fMxcsr);
3892 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3893 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3894 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3895 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3896
3897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3898 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3899 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
3900
3901 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3902 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3903 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3904 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3905 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3906 } IEM_MC_ELSE() {
3907 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3908 } IEM_MC_ENDIF();
3909
3910 IEM_MC_ADVANCE_RIP_AND_FINISH();
3911 IEM_MC_END();
3912 }
3913 else
3914 {
3915 /* XMM, [mem64] */
3916 IEM_MC_BEGIN(3, 4);
3917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3918 IEM_MC_LOCAL(uint32_t, fMxcsr);
3919 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3920 IEM_MC_LOCAL(int64_t, i64Src);
3921 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3922 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3923 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3924
3925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3927 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3928 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
3929
3930 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3931 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3932 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3933 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3934 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3935 } IEM_MC_ELSE() {
3936 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3937 } IEM_MC_ENDIF();
3938
3939 IEM_MC_ADVANCE_RIP_AND_FINISH();
3940 IEM_MC_END();
3941 }
3942 }
3943 else
3944 {
3945 if (IEM_IS_MODRM_REG_MODE(bRm))
3946 {
3947 /* XMM, greg32 */
3948 IEM_MC_BEGIN(3, 2);
3949 IEM_MC_LOCAL(uint32_t, fMxcsr);
3950 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3951 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3952 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3953 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3954
3955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3956 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3957 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
3958
3959 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3960 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3961 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3962 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3963 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3964 } IEM_MC_ELSE() {
3965 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3966 } IEM_MC_ENDIF();
3967
3968 IEM_MC_ADVANCE_RIP_AND_FINISH();
3969 IEM_MC_END();
3970 }
3971 else
3972 {
3973 /* XMM, [mem32] */
3974 IEM_MC_BEGIN(3, 4);
3975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3976 IEM_MC_LOCAL(uint32_t, fMxcsr);
3977 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3978 IEM_MC_LOCAL(int32_t, i32Src);
3979 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3980 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3981 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3982
3983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3985 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3986 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
3987
3988 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3989 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3990 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3991 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3992 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3993 } IEM_MC_ELSE() {
3994 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3995 } IEM_MC_ENDIF();
3996
3997 IEM_MC_ADVANCE_RIP_AND_FINISH();
3998 IEM_MC_END();
3999 }
4000 }
4001}
4002
4003
4004/**
4005 * @opcode 0x2b
4006 * @opcodesub !11 mr/reg
4007 * @oppfx none
4008 * @opcpuid sse
4009 * @opgroup og_sse1_cachect
4010 * @opxcpttype 1
4011 * @optest op1=1 op2=2 -> op1=2
4012 * @optest op1=0 op2=-42 -> op1=-42
4013 */
4014FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4015{
4016 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
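    /* Non-temporal 16-byte store.  The caching hint itself is not modeled
       here, so this amounts to an ordinary alignment-checked store; the
       register encoding (mod=11) is undefined and raises #UD below. */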
4017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4018 if (IEM_IS_MODRM_MEM_MODE(bRm))
4019 {
4020 /*
4021 * memory, register.
4022 */
4023 IEM_MC_BEGIN(0, 2);
4024 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4026
4027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4031
4032 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4033 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4034
4035 IEM_MC_ADVANCE_RIP_AND_FINISH();
4036 IEM_MC_END();
4037 }
4038 /* The register, register encoding is invalid. */
4039 else
4040 IEMOP_RAISE_INVALID_OPCODE_RET();
4041}
4042
4043/**
4044 * @opcode 0x2b
4045 * @opcodesub !11 mr/reg
4046 * @oppfx 0x66
4047 * @opcpuid sse2
4048 * @opgroup og_sse2_cachect
4049 * @opxcpttype 1
4050 * @optest op1=1 op2=2 -> op1=2
4051 * @optest op1=0 op2=-42 -> op1=-42
4052 */
4053FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4054{
4055 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4057 if (IEM_IS_MODRM_MEM_MODE(bRm))
4058 {
4059 /*
4060 * memory, register.
4061 */
4062 IEM_MC_BEGIN(0, 2);
4063 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4065
4066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4068 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4069 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4070
4071 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4072 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4073
4074 IEM_MC_ADVANCE_RIP_AND_FINISH();
4075 IEM_MC_END();
4076 }
4077 /* The register, register encoding is invalid. */
4078 else
4079 IEMOP_RAISE_INVALID_OPCODE_RET();
4080}
4081/* Opcode 0xf3 0x0f 0x2b - invalid */
4082/* Opcode 0xf2 0x0f 0x2b - invalid */
4083
4084
4085/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4086FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4087{
4088 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
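    /* Converts the two low packed singles to two int32 values in an MMX
       register, truncating towards zero regardless of MXCSR.RC (hence the
       't' in the mnemonic). */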
4089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4090 if (IEM_IS_MODRM_REG_MODE(bRm))
4091 {
4092 /*
4093 * Register, register.
4094 */
4095 IEM_MC_BEGIN(3, 1);
4096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4097 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4098 IEM_MC_LOCAL(uint64_t, u64Dst);
4099 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4100 IEM_MC_ARG(uint64_t, u64Src, 2);
4101 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4102 IEM_MC_PREPARE_FPU_USAGE();
4103 IEM_MC_FPU_TO_MMX_MODE();
4104
4105 IEM_MC_REF_MXCSR(pfMxcsr);
4106 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4107
4108 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4109 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4110 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4111 } IEM_MC_ELSE() {
4112 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4113 } IEM_MC_ENDIF();
4114
4115 IEM_MC_ADVANCE_RIP_AND_FINISH();
4116 IEM_MC_END();
4117 }
4118 else
4119 {
4120 /*
4121 * Register, memory.
4122 */
4123 IEM_MC_BEGIN(3, 2);
4124 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4125 IEM_MC_LOCAL(uint64_t, u64Dst);
4126 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4127 IEM_MC_ARG(uint64_t, u64Src, 2);
4128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4129
4130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4132 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4133 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4134
4135 IEM_MC_PREPARE_FPU_USAGE();
4136 IEM_MC_FPU_TO_MMX_MODE();
4137 IEM_MC_REF_MXCSR(pfMxcsr);
4138
4139 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4140 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4141 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4142 } IEM_MC_ELSE() {
4143 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4144 } IEM_MC_ENDIF();
4145
4146 IEM_MC_ADVANCE_RIP_AND_FINISH();
4147 IEM_MC_END();
4148 }
4149}
4150
4151
4152/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4153FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4154{
4155 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
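    /* Like cvttps2pi, but the source is a full 16 bytes (two doubles), so
       the memory form goes through the aligned XMM fetch below. */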
4156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4157 if (IEM_IS_MODRM_REG_MODE(bRm))
4158 {
4159 /*
4160 * Register, register.
4161 */
4162 IEM_MC_BEGIN(3, 1);
4163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4164 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4165 IEM_MC_LOCAL(uint64_t, u64Dst);
4166 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4167 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4169 IEM_MC_PREPARE_FPU_USAGE();
4170 IEM_MC_FPU_TO_MMX_MODE();
4171
4172 IEM_MC_REF_MXCSR(pfMxcsr);
4173 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4174
4175 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4176 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4177 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4178 } IEM_MC_ELSE() {
4179 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4180 } IEM_MC_ENDIF();
4181
4182 IEM_MC_ADVANCE_RIP_AND_FINISH();
4183 IEM_MC_END();
4184 }
4185 else
4186 {
4187 /*
4188 * Register, memory.
4189 */
4190 IEM_MC_BEGIN(3, 3);
4191 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4192 IEM_MC_LOCAL(uint64_t, u64Dst);
4193 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4194 IEM_MC_LOCAL(X86XMMREG, uSrc);
4195 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4197
4198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4200 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4201 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4202
4203 IEM_MC_PREPARE_FPU_USAGE();
4204 IEM_MC_FPU_TO_MMX_MODE();
4205
4206 IEM_MC_REF_MXCSR(pfMxcsr);
4207
4208 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4209 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4210 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4211 } IEM_MC_ELSE() {
4212 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4213 } IEM_MC_ENDIF();
4214
4215 IEM_MC_ADVANCE_RIP_AND_FINISH();
4216 IEM_MC_END();
4217 }
4218}
4219
4220
4221/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4222FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4223{
4224 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4225
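    /* Truncating scalar conversion: REX.W selects a 64-bit GPR destination,
       otherwise a 32-bit one.  The source is always a 32-bit single in an
       XMM register or memory. */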
4226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4227 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4228 {
4229 if (IEM_IS_MODRM_REG_MODE(bRm))
4230 {
4231 /* greg64, XMM */
4232 IEM_MC_BEGIN(3, 2);
4233 IEM_MC_LOCAL(uint32_t, fMxcsr);
4234 IEM_MC_LOCAL(int64_t, i64Dst);
4235 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4236 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4237 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4238
4239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4240 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4241 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4242
4243 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4244 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4245 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4246 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4247 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4248 } IEM_MC_ELSE() {
4249 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4250 } IEM_MC_ENDIF();
4251
4252 IEM_MC_ADVANCE_RIP_AND_FINISH();
4253 IEM_MC_END();
4254 }
4255 else
4256 {
4257 /* greg64, [mem32] */
4258 IEM_MC_BEGIN(3, 4);
4259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4260 IEM_MC_LOCAL(uint32_t, fMxcsr);
4261 IEM_MC_LOCAL(int64_t, i64Dst);
4262 IEM_MC_LOCAL(uint32_t, u32Src);
4263 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4264 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4265 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4266
4267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4269 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4270 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4271
4272 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4273 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4274 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4275 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4276 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4277 } IEM_MC_ELSE() {
4278 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4279 } IEM_MC_ENDIF();
4280
4281 IEM_MC_ADVANCE_RIP_AND_FINISH();
4282 IEM_MC_END();
4283 }
4284 }
4285 else
4286 {
4287 if (IEM_IS_MODRM_REG_MODE(bRm))
4288 {
4289 /* greg32, XMM */
4290 IEM_MC_BEGIN(3, 2);
4291 IEM_MC_LOCAL(uint32_t, fMxcsr);
4292 IEM_MC_LOCAL(int32_t, i32Dst);
4293 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4294 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4295 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4296
4297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4298 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4299 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4300
4301 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4302 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4303 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4304 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4305 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4306 } IEM_MC_ELSE() {
4307 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4308 } IEM_MC_ENDIF();
4309
4310 IEM_MC_ADVANCE_RIP_AND_FINISH();
4311 IEM_MC_END();
4312 }
4313 else
4314 {
4315 /* greg32, [mem32] */
4316 IEM_MC_BEGIN(3, 4);
4317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4318 IEM_MC_LOCAL(uint32_t, fMxcsr);
4319 IEM_MC_LOCAL(int32_t, i32Dst);
4320 IEM_MC_LOCAL(uint32_t, u32Src);
4321 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4322 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4323 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4324
4325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4328 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4329
4330 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4331 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4332 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4333 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4334 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4335 } IEM_MC_ELSE() {
4336 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4337 } IEM_MC_ENDIF();
4338
4339 IEM_MC_ADVANCE_RIP_AND_FINISH();
4340 IEM_MC_END();
4341 }
4342 }
4343}
4344
4345
4346/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4347FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4348{
4349 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4350
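    /* The SSE2 sibling of cvttss2si: same structure, but the source is a
       64-bit double. */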
4351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4352 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4353 {
4354 if (IEM_IS_MODRM_REG_MODE(bRm))
4355 {
4356 /* greg64, XMM */
4357 IEM_MC_BEGIN(3, 2);
4358 IEM_MC_LOCAL(uint32_t, fMxcsr);
4359 IEM_MC_LOCAL(int64_t, i64Dst);
4360 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4361 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4362 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4363
4364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4365 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4366 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4367
4368 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4369 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4370 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4371 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4372 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4373 } IEM_MC_ELSE() {
4374 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4375 } IEM_MC_ENDIF();
4376
4377 IEM_MC_ADVANCE_RIP_AND_FINISH();
4378 IEM_MC_END();
4379 }
4380 else
4381 {
4382 /* greg64, [mem64] */
4383 IEM_MC_BEGIN(3, 4);
4384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4385 IEM_MC_LOCAL(uint32_t, fMxcsr);
4386 IEM_MC_LOCAL(int64_t, i64Dst);
4387 IEM_MC_LOCAL(uint64_t, u64Src);
4388 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4389 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4390 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4391
4392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4394 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4395 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4396
4397 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4398 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4399 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4400 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4401 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4402 } IEM_MC_ELSE() {
4403 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4404 } IEM_MC_ENDIF();
4405
4406 IEM_MC_ADVANCE_RIP_AND_FINISH();
4407 IEM_MC_END();
4408 }
4409 }
4410 else
4411 {
4412 if (IEM_IS_MODRM_REG_MODE(bRm))
4413 {
4414 /* greg32, XMM */
4415 IEM_MC_BEGIN(3, 2);
4416 IEM_MC_LOCAL(uint32_t, fMxcsr);
4417 IEM_MC_LOCAL(int32_t, i32Dst);
4418 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4419 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4420 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4421
4422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4423 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4424 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4425
4426 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4427 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4428 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4429 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4430 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4431 } IEM_MC_ELSE() {
4432 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4433 } IEM_MC_ENDIF();
4434
4435 IEM_MC_ADVANCE_RIP_AND_FINISH();
4436 IEM_MC_END();
4437 }
4438 else
4439 {
4440 /* greg32, [mem64] */
4441 IEM_MC_BEGIN(3, 4);
4442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4443 IEM_MC_LOCAL(uint32_t, fMxcsr);
4444 IEM_MC_LOCAL(int32_t, i32Dst);
4445 IEM_MC_LOCAL(uint64_t, u64Src);
4446 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4447 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4448 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4449
4450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4452 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4453 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4454
4455 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4456 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4457 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4458 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4459 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4460 } IEM_MC_ELSE() {
4461 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4462 } IEM_MC_ENDIF();
4463
4464 IEM_MC_ADVANCE_RIP_AND_FINISH();
4465 IEM_MC_END();
4466 }
4467 }
4468}
4469
4470
4471/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4472FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4473{
4474 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
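    /* Identical to cvttps2pi above, except that the conversion rounds
       according to MXCSR.RC instead of truncating. */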
4475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4476 if (IEM_IS_MODRM_REG_MODE(bRm))
4477 {
4478 /*
4479 * Register, register.
4480 */
4481 IEM_MC_BEGIN(3, 1);
4482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4483 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4484 IEM_MC_LOCAL(uint64_t, u64Dst);
4485 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4486 IEM_MC_ARG(uint64_t, u64Src, 2);
4487
4488 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4489 IEM_MC_PREPARE_FPU_USAGE();
4490 IEM_MC_FPU_TO_MMX_MODE();
4491
4492 IEM_MC_REF_MXCSR(pfMxcsr);
4493 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4494
4495 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4496 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4497 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4498 } IEM_MC_ELSE() {
4499 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4500 } IEM_MC_ENDIF();
4501
4502 IEM_MC_ADVANCE_RIP_AND_FINISH();
4503 IEM_MC_END();
4504 }
4505 else
4506 {
4507 /*
4508 * Register, memory.
4509 */
4510 IEM_MC_BEGIN(3, 2);
4511 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4512 IEM_MC_LOCAL(uint64_t, u64Dst);
4513 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4514 IEM_MC_ARG(uint64_t, u64Src, 2);
4515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4516
4517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4519 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4520 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4521
4522 IEM_MC_PREPARE_FPU_USAGE();
4523 IEM_MC_FPU_TO_MMX_MODE();
4524 IEM_MC_REF_MXCSR(pfMxcsr);
4525
4526 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4527 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4528 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4529 } IEM_MC_ELSE() {
4530 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4531 } IEM_MC_ENDIF();
4532
4533 IEM_MC_ADVANCE_RIP_AND_FINISH();
4534 IEM_MC_END();
4535 }
4536}
4537
4538
4539/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4540FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4541{
4542 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4544 if (IEM_IS_MODRM_REG_MODE(bRm))
4545 {
4546 /*
4547 * Register, register.
4548 */
4549 IEM_MC_BEGIN(3, 1);
4550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4551 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4552 IEM_MC_LOCAL(uint64_t, u64Dst);
4553 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4554 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4555
4556 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4557 IEM_MC_PREPARE_FPU_USAGE();
4558 IEM_MC_FPU_TO_MMX_MODE();
4559
4560 IEM_MC_REF_MXCSR(pfMxcsr);
4561 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4562
4563 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4564 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4565 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4566 } IEM_MC_ELSE() {
4567 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4568 } IEM_MC_ENDIF();
4569
4570 IEM_MC_ADVANCE_RIP_AND_FINISH();
4571 IEM_MC_END();
4572 }
4573 else
4574 {
4575 /*
4576 * Register, memory.
4577 */
4578 IEM_MC_BEGIN(3, 3);
4579 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4580 IEM_MC_LOCAL(uint64_t, u64Dst);
4581 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4582 IEM_MC_LOCAL(X86XMMREG, uSrc);
4583 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4585
4586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4588 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4589 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4590
4591 IEM_MC_PREPARE_FPU_USAGE();
4592 IEM_MC_FPU_TO_MMX_MODE();
4593
4594 IEM_MC_REF_MXCSR(pfMxcsr);
4595
4596 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4597 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4598 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4599 } IEM_MC_ELSE() {
4600 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4601 } IEM_MC_ENDIF();
4602
4603 IEM_MC_ADVANCE_RIP_AND_FINISH();
4604 IEM_MC_END();
4605 }
4606}
4607
4608
4609/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4610FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4611{
4612 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4613
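    /* Rounding (MXCSR.RC) variant of cvttss2si; otherwise identical in
       structure. */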
4614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4615 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4616 {
4617 if (IEM_IS_MODRM_REG_MODE(bRm))
4618 {
4619 /* greg64, XMM */
4620 IEM_MC_BEGIN(3, 2);
4621 IEM_MC_LOCAL(uint32_t, fMxcsr);
4622 IEM_MC_LOCAL(int64_t, i64Dst);
4623 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4624 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4625 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4626
4627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4628 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4629 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4630
4631 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4632 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4633 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4634 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4635 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4636 } IEM_MC_ELSE() {
4637 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4638 } IEM_MC_ENDIF();
4639
4640 IEM_MC_ADVANCE_RIP_AND_FINISH();
4641 IEM_MC_END();
4642 }
4643 else
4644 {
4645 /* greg64, [mem32] */
4646 IEM_MC_BEGIN(3, 4);
4647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4648 IEM_MC_LOCAL(uint32_t, fMxcsr);
4649 IEM_MC_LOCAL(int64_t, i64Dst);
4650 IEM_MC_LOCAL(uint32_t, u32Src);
4651 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4652 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4653 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4654
4655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4657 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4658 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4659
4660 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4661 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4662 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4663 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4664 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4665 } IEM_MC_ELSE() {
4666 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4667 } IEM_MC_ENDIF();
4668
4669 IEM_MC_ADVANCE_RIP_AND_FINISH();
4670 IEM_MC_END();
4671 }
4672 }
4673 else
4674 {
4675 if (IEM_IS_MODRM_REG_MODE(bRm))
4676 {
4677 /* greg32, XMM */
4678 IEM_MC_BEGIN(3, 2);
4679 IEM_MC_LOCAL(uint32_t, fMxcsr);
4680 IEM_MC_LOCAL(int32_t, i32Dst);
4681 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4682 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4683 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4684
4685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4686 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4687 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4688
4689 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4690 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4691 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4692 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4693 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4694 } IEM_MC_ELSE() {
4695 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4696 } IEM_MC_ENDIF();
4697
4698 IEM_MC_ADVANCE_RIP_AND_FINISH();
4699 IEM_MC_END();
4700 }
4701 else
4702 {
4703 /* greg32, [mem32] */
4704 IEM_MC_BEGIN(3, 4);
4705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4706 IEM_MC_LOCAL(uint32_t, fMxcsr);
4707 IEM_MC_LOCAL(int32_t, i32Dst);
4708 IEM_MC_LOCAL(uint32_t, u32Src);
4709 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4710 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4711 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4712
4713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4715 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4716 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4717
4718 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4719 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4720 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4721 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4722 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4723 } IEM_MC_ELSE() {
4724 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4725 } IEM_MC_ENDIF();
4726
4727 IEM_MC_ADVANCE_RIP_AND_FINISH();
4728 IEM_MC_END();
4729 }
4730 }
4731}
4732
4733
4734/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4735FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4736{
4737 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4738
4739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4740 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4741 {
4742 if (IEM_IS_MODRM_REG_MODE(bRm))
4743 {
4744 /* greg64, XMM */
4745 IEM_MC_BEGIN(3, 2);
4746 IEM_MC_LOCAL(uint32_t, fMxcsr);
4747 IEM_MC_LOCAL(int64_t, i64Dst);
4748 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4749 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4750 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4751
4752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4753 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4754 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4755
4756 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4757 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4758 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4759 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4760 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4761 } IEM_MC_ELSE() {
4762 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4763 } IEM_MC_ENDIF();
4764
4765 IEM_MC_ADVANCE_RIP_AND_FINISH();
4766 IEM_MC_END();
4767 }
4768 else
4769 {
4770 /* greg64, [mem64] */
4771 IEM_MC_BEGIN(3, 4);
4772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4773 IEM_MC_LOCAL(uint32_t, fMxcsr);
4774 IEM_MC_LOCAL(int64_t, i64Dst);
4775 IEM_MC_LOCAL(uint64_t, u64Src);
4776 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4777 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4778 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4779
4780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4782 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4783 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4784
4785 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4786 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4787 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4788 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4789 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4790 } IEM_MC_ELSE() {
4791 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4792 } IEM_MC_ENDIF();
4793
4794 IEM_MC_ADVANCE_RIP_AND_FINISH();
4795 IEM_MC_END();
4796 }
4797 }
4798 else
4799 {
4800 if (IEM_IS_MODRM_REG_MODE(bRm))
4801 {
4802 /* greg32, XMM */
4803 IEM_MC_BEGIN(3, 2);
4804 IEM_MC_LOCAL(uint32_t, fMxcsr);
4805 IEM_MC_LOCAL(int32_t, i32Dst);
4806 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4807 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4808 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4809
4810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4811 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4812 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4813
4814 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4815 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4816 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4817 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4818 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4819 } IEM_MC_ELSE() {
4820 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4821 } IEM_MC_ENDIF();
4822
4823 IEM_MC_ADVANCE_RIP_AND_FINISH();
4824 IEM_MC_END();
4825 }
4826 else
4827 {
4828 /* greg32, [mem64] */
4829 IEM_MC_BEGIN(3, 4);
4830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4831 IEM_MC_LOCAL(uint32_t, fMxcsr);
4832 IEM_MC_LOCAL(int32_t, i32Dst);
4833 IEM_MC_LOCAL(uint64_t, u64Src);
4834 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4835 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4836 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4837
4838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4840 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4841 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() calls this, but the tstIEMCheckMc testcase depends on it. */
4842
4843 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4844 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4845 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4846 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4847 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4848 } IEM_MC_ELSE() {
4849 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4850 } IEM_MC_ENDIF();
4851
4852 IEM_MC_ADVANCE_RIP_AND_FINISH();
4853 IEM_MC_END();
4854 }
4855 }
4856}
4857
4858
4859/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4860FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4861{
4862 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
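    /* Unordered compare of the low single-precision values, setting ZF, PF
       and CF (PF=1 signals unordered) and clearing OF, SF and AF.  Unlike
       comiss, it signals an invalid-operation exception only for SNaN
       operands. */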
4863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4864 if (IEM_IS_MODRM_REG_MODE(bRm))
4865 {
4866 /*
4867 * Register, register.
4868 */
4869 IEM_MC_BEGIN(4, 1);
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4871 IEM_MC_LOCAL(uint32_t, fEFlags);
4872 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4873 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4874 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4875 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4876 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4877 IEM_MC_PREPARE_SSE_USAGE();
4878 IEM_MC_FETCH_EFLAGS(fEFlags);
4879 IEM_MC_REF_MXCSR(pfMxcsr);
4880 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4881 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4882 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4883 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4884 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4885 } IEM_MC_ELSE() {
4886 IEM_MC_COMMIT_EFLAGS(fEFlags);
4887 } IEM_MC_ENDIF();
4888
4889 IEM_MC_ADVANCE_RIP_AND_FINISH();
4890 IEM_MC_END();
4891 }
4892 else
4893 {
4894 /*
4895 * Register, memory.
4896 */
4897 IEM_MC_BEGIN(4, 3);
4898 IEM_MC_LOCAL(uint32_t, fEFlags);
4899 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4900 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4901 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4902 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4903 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4905
4906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4908 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4909 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4910
4911 IEM_MC_PREPARE_SSE_USAGE();
4912 IEM_MC_FETCH_EFLAGS(fEFlags);
4913 IEM_MC_REF_MXCSR(pfMxcsr);
4914 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4915 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4916 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4917 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4918 } IEM_MC_ELSE() {
4919 IEM_MC_COMMIT_EFLAGS(fEFlags);
4920 } IEM_MC_ENDIF();
4921
4922 IEM_MC_ADVANCE_RIP_AND_FINISH();
4923 IEM_MC_END();
4924 }
4925}
4926
4927
4928/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4929FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4930{
4931 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4932 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4933 if (IEM_IS_MODRM_REG_MODE(bRm))
4934 {
4935 /*
4936 * Register, register.
4937 */
4938 IEM_MC_BEGIN(4, 1);
4939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4940 IEM_MC_LOCAL(uint32_t, fEFlags);
4941 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4942 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4943 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4944 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4945 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4946 IEM_MC_PREPARE_SSE_USAGE();
4947 IEM_MC_FETCH_EFLAGS(fEFlags);
4948 IEM_MC_REF_MXCSR(pfMxcsr);
4949 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4950 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4951 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4952 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4953 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4954 } IEM_MC_ELSE() {
4955 IEM_MC_COMMIT_EFLAGS(fEFlags);
4956 } IEM_MC_ENDIF();
4957
4958 IEM_MC_ADVANCE_RIP_AND_FINISH();
4959 IEM_MC_END();
4960 }
4961 else
4962 {
4963 /*
4964 * Register, memory.
4965 */
4966 IEM_MC_BEGIN(4, 3);
4967 IEM_MC_LOCAL(uint32_t, fEFlags);
4968 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4969 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4970 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4971 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4972 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4974
4975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4977 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4978 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4979
4980 IEM_MC_PREPARE_SSE_USAGE();
4981 IEM_MC_FETCH_EFLAGS(fEFlags);
4982 IEM_MC_REF_MXCSR(pfMxcsr);
4983 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4984 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4985 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4986 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4987 } IEM_MC_ELSE() {
4988 IEM_MC_COMMIT_EFLAGS(fEFlags);
4989 } IEM_MC_ENDIF();
4990
4991 IEM_MC_ADVANCE_RIP_AND_FINISH();
4992 IEM_MC_END();
4993 }
4994}
4995
4996
4997/* Opcode 0xf3 0x0f 0x2e - invalid */
4998/* Opcode 0xf2 0x0f 0x2e - invalid */
4999
5000
5001/** Opcode 0x0f 0x2f - comiss Vss, Wss */
5002FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5003{
5004 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
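    /* Ordered variant of ucomiss: additionally signals invalid for QNaN
       operands. */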
5005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5006 if (IEM_IS_MODRM_REG_MODE(bRm))
5007 {
5008 /*
5009 * Register, register.
5010 */
5011 IEM_MC_BEGIN(4, 1);
5012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5013 IEM_MC_LOCAL(uint32_t, fEFlags);
5014 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5015 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5016 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5017 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5019 IEM_MC_PREPARE_SSE_USAGE();
5020 IEM_MC_FETCH_EFLAGS(fEFlags);
5021 IEM_MC_REF_MXCSR(pfMxcsr);
5022 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5023 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5024 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5025 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5026 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5027 } IEM_MC_ELSE() {
5028 IEM_MC_COMMIT_EFLAGS(fEFlags);
5029 } IEM_MC_ENDIF();
5030
5031 IEM_MC_ADVANCE_RIP_AND_FINISH();
5032 IEM_MC_END();
5033 }
5034 else
5035 {
5036 /*
5037 * Register, memory.
5038 */
5039 IEM_MC_BEGIN(4, 3);
5040 IEM_MC_LOCAL(uint32_t, fEFlags);
5041 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5042 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5043 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5044 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5045 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5047
5048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5050 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5051 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5052
5053 IEM_MC_PREPARE_SSE_USAGE();
5054 IEM_MC_FETCH_EFLAGS(fEFlags);
5055 IEM_MC_REF_MXCSR(pfMxcsr);
5056 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5057 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5058 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5059 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5060 } IEM_MC_ELSE() {
5061 IEM_MC_COMMIT_EFLAGS(fEFlags);
5062 } IEM_MC_ENDIF();
5063
5064 IEM_MC_ADVANCE_RIP_AND_FINISH();
5065 IEM_MC_END();
5066 }
5067}
5068
5069
5070/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5071FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5072{
5073 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5075 if (IEM_IS_MODRM_REG_MODE(bRm))
5076 {
5077 /*
5078 * Register, register.
5079 */
5080 IEM_MC_BEGIN(4, 1);
5081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5082 IEM_MC_LOCAL(uint32_t, fEFlags);
5083 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5084 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5085 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5086 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5087 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5088 IEM_MC_PREPARE_SSE_USAGE();
5089 IEM_MC_FETCH_EFLAGS(fEFlags);
5090 IEM_MC_REF_MXCSR(pfMxcsr);
5091 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5092 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5093 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5094 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5095 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5096 } IEM_MC_ELSE() {
5097 IEM_MC_COMMIT_EFLAGS(fEFlags);
5098 } IEM_MC_ENDIF();
5099
5100 IEM_MC_ADVANCE_RIP_AND_FINISH();
5101 IEM_MC_END();
5102 }
5103 else
5104 {
5105 /*
5106 * Register, memory.
5107 */
5108 IEM_MC_BEGIN(4, 3);
5109 IEM_MC_LOCAL(uint32_t, fEFlags);
5110 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5111 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5112 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5113 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5114 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5116
5117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5119 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5120 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5121
5122 IEM_MC_PREPARE_SSE_USAGE();
5123 IEM_MC_FETCH_EFLAGS(fEFlags);
5124 IEM_MC_REF_MXCSR(pfMxcsr);
5125 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5126 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5127 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5128 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5129 } IEM_MC_ELSE() {
5130 IEM_MC_COMMIT_EFLAGS(fEFlags);
5131 } IEM_MC_ENDIF();
5132
5133 IEM_MC_ADVANCE_RIP_AND_FINISH();
5134 IEM_MC_END();
5135 }
5136}
5137
5138
5139/* Opcode 0xf3 0x0f 0x2f - invalid */
5140/* Opcode 0xf2 0x0f 0x2f - invalid */
5141
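/*
 * The following system instructions take no ModRM operands and are handed
 * off to C implementations (iemCImpl_*); IEM_CIMPL_F_VMEXIT marks them as
 * potentially causing a VM-exit when executed by a nested guest.
 */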
5142/** Opcode 0x0f 0x30. */
5143FNIEMOP_DEF(iemOp_wrmsr)
5144{
5145 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5147 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wrmsr);
5148}
5149
5150
5151/** Opcode 0x0f 0x31. */
5152FNIEMOP_DEF(iemOp_rdtsc)
5153{
5154 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5156 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtsc);
5157}
5158
5159
5160/** Opcode 0x0f 0x32. */
5161FNIEMOP_DEF(iemOp_rdmsr)
5162{
5163 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdmsr);
5166}
5167
5168
5169/** Opcode 0x0f 0x33. */
5170FNIEMOP_DEF(iemOp_rdpmc)
5171{
5172 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdpmc);
5175}
5176
5177
5178/** Opcode 0x0f 0x34. */
5179FNIEMOP_DEF(iemOp_sysenter)
5180{
5181 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
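    /* A far, mode-changing branch: these flags make the recompiler flush its
       state and end the current translation block (likewise for sysexit
       below). */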
5183 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5184 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5185 iemCImpl_sysenter);
5186}
5187
5188/** Opcode 0x0f 0x35. */
5189FNIEMOP_DEF(iemOp_sysexit)
5190{
5191 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5193 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5194 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5195 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5196}
5197
5198/** Opcode 0x0f 0x37. */
5199FNIEMOP_STUB(iemOp_getsec);
5200
5201
5202/** Opcode 0x0f 0x38. */
5203FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5204{
5205#ifdef IEM_WITH_THREE_0F_38
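    /* The three-byte tables keep four entries per opcode byte, one each for
       the no-prefix, 0x66, 0xf3 and 0xf2 forms, hence the '* 4 + idxPrefix'
       indexing. */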
5206 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5207 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5208#else
5209 IEMOP_BITCH_ABOUT_STUB();
5210 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5211#endif
5212}
5213
5214
5215/** Opcode 0x0f 0x3a. */
5216FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5217{
5218#ifdef IEM_WITH_THREE_0F_3A
5219 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5220 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5221#else
5222 IEMOP_BITCH_ABOUT_STUB();
5223 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5224#endif
5225}
5226
5227
5228/**
5229 * Implements a conditional move.
5230 *
5231 * Wish there were an obvious way to do this that would let us share code
5232 * and reduce bloat.
5233 *
5234 * @param a_Cnd The conditional "microcode" operation.
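 *
 * @remarks In 64-bit mode a 32-bit CMOV writes its destination register even
 *          when the condition is false, zero-extending it; that is what the
 *          IEM_MC_CLEAR_HIGH_GREG_U64 in the 32-bit IEM_MC_ELSE branches
 *          implements.  The 16-bit form leaves the upper bits untouched
 *          either way.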
5235 */
5236#define CMOV_X(a_Cnd) \
5237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5238 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5239 { \
5240 switch (pVCpu->iem.s.enmEffOpSize) \
5241 { \
5242 case IEMMODE_16BIT: \
5243 IEM_MC_BEGIN(0, 1); \
5244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5245 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5246 a_Cnd { \
5247 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5248 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5249 } IEM_MC_ENDIF(); \
5250 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5251 IEM_MC_END(); \
5252 break; \
5253 \
5254 case IEMMODE_32BIT: \
5255 IEM_MC_BEGIN(0, 1); \
5256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5257 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5258 a_Cnd { \
5259 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5260 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5261 } IEM_MC_ELSE() { \
5262 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5263 } IEM_MC_ENDIF(); \
5264 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5265 IEM_MC_END(); \
5266 break; \
5267 \
5268 case IEMMODE_64BIT: \
5269 IEM_MC_BEGIN(0, 1); \
5270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5271 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5272 a_Cnd { \
5273 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5274 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5275 } IEM_MC_ENDIF(); \
5276 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5277 IEM_MC_END(); \
5278 break; \
5279 \
5280 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5281 } \
5282 } \
5283 else \
5284 { \
5285 switch (pVCpu->iem.s.enmEffOpSize) \
5286 { \
5287 case IEMMODE_16BIT: \
5288 IEM_MC_BEGIN(0, 2); \
5289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5290 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5293 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5294 a_Cnd { \
5295 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5296 } IEM_MC_ENDIF(); \
5297 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5298 IEM_MC_END(); \
5299 break; \
5300 \
5301 case IEMMODE_32BIT: \
5302 IEM_MC_BEGIN(0, 2); \
5303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5304 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5307 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5308 a_Cnd { \
5309 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5310 } IEM_MC_ELSE() { \
5311 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5312 } IEM_MC_ENDIF(); \
5313 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5314 IEM_MC_END(); \
5315 break; \
5316 \
5317 case IEMMODE_64BIT: \
5318 IEM_MC_BEGIN(0, 2); \
5319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5320 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5323 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5324 a_Cnd { \
5325 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5326 } IEM_MC_ENDIF(); \
5327 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5328 IEM_MC_END(); \
5329 break; \
5330 \
5331 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5332 } \
5333 } do {} while (0)
5334
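/* Plain-C sketch of what the 32-bit memory form of CMOV_X boils down to
 * (illustrative pseudo-helpers, not the real IEM API):
 *
 *      uint32_t const uSrc = ReadMemU32(iEffSeg, GCPtrEffSrc); // always read
 *      if (fCondition)
 *          StoreGReg32(iGRegDst, uSrc);   // store zeroes bits 63:32 too
 *      else
 *          ClearHighGReg64(iGRegDst);     // high half cleared regardless
 *
 * I.e. the memory operand is fetched (and may fault) even when the condition
 * is false, and the 32-bit forms always zero-extend the destination; cmov is
 * not a branch.
 */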
5335
5336
5337/** Opcode 0x0f 0x40. */
5338FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5339{
5340 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5341 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5342}
5343
5344
5345/** Opcode 0x0f 0x41. */
5346FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5347{
5348 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5349 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5350}
5351
5352
5353/** Opcode 0x0f 0x42. */
5354FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5355{
5356 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5357 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5358}
5359
5360
5361/** Opcode 0x0f 0x43. */
5362FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5363{
5364 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5365 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5366}
5367
5368
5369/** Opcode 0x0f 0x44. */
5370FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5371{
5372 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5373 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5374}
5375
5376
5377/** Opcode 0x0f 0x45. */
5378FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5379{
5380 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5381 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5382}
5383
5384
5385/** Opcode 0x0f 0x46. */
5386FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5387{
5388 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5389 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5390}
5391
5392
5393/** Opcode 0x0f 0x47. */
5394FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5395{
5396 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5397 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5398}
5399
5400
5401/** Opcode 0x0f 0x48. */
5402FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5403{
5404 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5405 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5406}
5407
5408
5409/** Opcode 0x0f 0x49. */
5410FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5411{
5412 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5413 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5414}
5415
5416
5417/** Opcode 0x0f 0x4a. */
5418FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5419{
5420 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5421 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5422}
5423
5424
5425/** Opcode 0x0f 0x4b. */
5426FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5427{
5428 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5429 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5430}
5431
5432
5433/** Opcode 0x0f 0x4c. */
5434FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5435{
5436 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5437 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5438}
5439
5440
5441/** Opcode 0x0f 0x4d. */
5442FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5443{
5444 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5445 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5446}
5447
5448
5449/** Opcode 0x0f 0x4e. */
5450FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5451{
5452 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5453 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5454}
5455
5456
5457/** Opcode 0x0f 0x4f. */
5458FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5459{
5460 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5461 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5462}
5463
5464#undef CMOV_X
5465
5466/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5467FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5468{
5469 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5471 if (IEM_IS_MODRM_REG_MODE(bRm))
5472 {
5473 /*
5474 * Register, register.
5475 */
5476 IEM_MC_BEGIN(2, 1);
5477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5478 IEM_MC_LOCAL(uint8_t, u8Dst);
5479 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5480 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5481 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5482 IEM_MC_PREPARE_SSE_USAGE();
5483 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5484 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5485 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5486 IEM_MC_ADVANCE_RIP_AND_FINISH();
5487 IEM_MC_END();
5488 }
5489 /* No memory operand. */
5490 else
5491 IEMOP_RAISE_INVALID_OPCODE_RET();
5492}
5493
5494
5495/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5496FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5497{
5498 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5500 if (IEM_IS_MODRM_REG_MODE(bRm))
5501 {
5502 /*
5503 * Register, register.
5504 */
5505 IEM_MC_BEGIN(2, 1);
5506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5507 IEM_MC_LOCAL(uint8_t, u8Dst);
5508 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5509 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5510 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5511 IEM_MC_PREPARE_SSE_USAGE();
5512 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5513 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5514 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5515 IEM_MC_ADVANCE_RIP_AND_FINISH();
5516 IEM_MC_END();
5517 }
5518 /* No memory operand. */
5519 else
5520 IEMOP_RAISE_INVALID_OPCODE_RET();
5521
5522}
5523
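/* Both movmsk workers just gather the packed sign bits into the low bits of
 * a GPR.  Minimal standalone sketch of the single-precision variant, assuming
 * the RTUINT128U au32 view (MyMovMskPs is a made-up name):
 *
 *      static uint8_t MyMovMskPs(PCRTUINT128U puSrc)
 *      {
 *          uint8_t fMask = 0;
 *          for (unsigned i = 0; i < 4; i++)
 *              fMask |= (uint8_t)((puSrc->au32[i] >> 31) << i);
 *          return fMask;
 *      }
 *
 * movmskpd does the same with the two au64 sign bits, yielding a 2-bit mask.
 */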
5524
5525/* Opcode 0xf3 0x0f 0x50 - invalid */
5526/* Opcode 0xf2 0x0f 0x50 - invalid */
5527
5528
5529/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5530FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5531{
5532 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5533 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5534}
5535
5536
5537/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5538FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5539{
5540 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5541 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5542}
5543
5544
5545/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5546FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5547{
5548 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5549 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5550}
5551
5552
5553/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5554FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5555{
5556 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5557 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5558}
5559
5560
5561/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5562FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5563{
5564 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5565 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5566}
5567
5568
5569/* Opcode 0x66 0x0f 0x52 - invalid */
5570
5571
5572/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5573FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5574{
5575 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5576 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5577}
5578
5579
5580/* Opcode 0xf2 0x0f 0x52 - invalid */
5581
5582/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5583FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5584/* Opcode 0x66 0x0f 0x53 - invalid */
5585/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5586FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5587/* Opcode 0xf2 0x0f 0x53 - invalid */
5588
5589
5590/** Opcode 0x0f 0x54 - andps Vps, Wps */
5591FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5592{
5593 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5594 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5595}
5596
5597
5598/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5599FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5600{
5601 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5602 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5603}
5604
5605
5606/* Opcode 0xf3 0x0f 0x54 - invalid */
5607/* Opcode 0xf2 0x0f 0x54 - invalid */
5608
5609
5610/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5611FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5612{
5613 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5614 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5615}
5616
5617
5618/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5619FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5620{
5621 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5622 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5623}
5624
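/* Worth noting: pandn, reused above for andnps/andnpd, inverts the
 * destination rather than the source, i.e. dst = ~dst & src.  Per 64-bit
 * lane that is simply (sketch of the semantics, not the actual
 * iemAImpl_pandn_u128 body):
 *
 *      puDst->au64[0] = ~puDst->au64[0] & puSrc->au64[0];
 *      puDst->au64[1] = ~puDst->au64[1] & puSrc->au64[1];
 */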
5625
5626/* Opcode 0xf3 0x0f 0x55 - invalid */
5627/* Opcode 0xf2 0x0f 0x55 - invalid */
5628
5629
5630/** Opcode 0x0f 0x56 - orps Vps, Wps */
5631FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5632{
5633 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5634 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5635}
5636
5637
5638/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5639FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5640{
5641 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5642 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5643}
5644
5645
5646/* Opcode 0xf3 0x0f 0x56 - invalid */
5647/* Opcode 0xf2 0x0f 0x56 - invalid */
5648
5649
5650/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5651FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5652{
5653 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5654 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5655}
5656
5657
5658/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5659FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5660{
5661 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5662 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5663}
5664
5665
5666/* Opcode 0xf3 0x0f 0x57 - invalid */
5667/* Opcode 0xf2 0x0f 0x57 - invalid */
5668
5669/** Opcode 0x0f 0x58 - addps Vps, Wps */
5670FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5671{
5672 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5673 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5674}
5675
5676
5677/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5678FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5679{
5680 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5681 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5682}
5683
5684
5685/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5686FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5687{
5688 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5689 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5690}
5691
5692
5693/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5694FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5695{
5696 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5697 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5698}
5699
5700
5701/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5702FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5703{
5704 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5705 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5706}
5707
5708
5709/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5710FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5711{
5712 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5713 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5714}
5715
5716
5717/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5718FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5719{
5720 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5721 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5722}
5723
5724
5725/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5726FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5727{
5728 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5729 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5730}
5731
5732
5733/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5734FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5735{
5736 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5737 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5738}
5739
5740
5741/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5742FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5743{
5744 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5745 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5746}
5747
5748
5749/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5750FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5751{
5752 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5753 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5754}
5755
5756
5757/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5758FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5759{
5760 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5761 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5762}
5763
5764
5765/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5766FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5767{
5768 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5769 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5770}
5771
5772
5773/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5774FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5775{
5776 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5777 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5778}
5779
5780
5781/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5782FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5783{
5784 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5785 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5786}
5787
5788
5789/* Opcode 0xf2 0x0f 0x5b - invalid */
5790
5791
5792/** Opcode 0x0f 0x5c - subps Vps, Wps */
5793FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5794{
5795 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5796 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5797}
5798
5799
5800/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5801FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5802{
5803 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5804 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5805}
5806
5807
5808/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5809FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5810{
5811 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5812 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5813}
5814
5815
5816/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5817FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5818{
5819 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5820 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5821}
5822
5823
5824/** Opcode 0x0f 0x5d - minps Vps, Wps */
5825FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5826{
5827 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5828 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5829}
5830
5831
5832/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5833FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5834{
5835 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5836 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5837}
5838
5839
5840/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5841FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5842{
5843 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5844 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5845}
5846
5847
5848/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5849FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5850{
5851 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5852 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5853}
5854
5855
5856/** Opcode 0x0f 0x5e - divps Vps, Wps */
5857FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5858{
5859 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5860 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5861}
5862
5863
5864/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5865FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5866{
5867 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5868 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5869}
5870
5871
5872/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5873FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5874{
5875 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5876 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5877}
5878
5879
5880/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5881FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5882{
5883 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5884 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5885}
5886
5887
5888/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5889FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5890{
5891 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5892 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5893}
5894
5895
5896/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5897FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5898{
5899 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5900 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5901}
5902
5903
5904/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5905FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5906{
5907 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5908 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5909}
5910
5911
5912/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5913FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5914{
5915 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5916 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5917}
5918
5919
5920/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5921FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5922{
5923 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5924 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5925}
5926
5927
5928/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5929FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5930{
5931 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5932 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5933}
5934
5935
5936/* Opcode 0xf3 0x0f 0x60 - invalid */
5937
5938
5939/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5940FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5941{
5942 /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
5943 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5944 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5945}
5946
5947
5948/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5949FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5950{
5951 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5952 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5953}
5954
5955
5956/* Opcode 0xf3 0x0f 0x61 - invalid */
5957
5958
5959/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5960FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5961{
5962 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5963 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5964}
5965
5966
5967/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5968FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5969{
5970 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5971 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5972}
5973
5974
5975/* Opcode 0xf3 0x0f 0x62 - invalid */
5976
5977
5978
5979/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5980FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5981{
5982 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5983 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5984}
5985
5986
5987/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5988FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5989{
5990 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5991 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5992}
5993
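/* The pack instructions saturate, they do not truncate.  Per-element sketch
 * of the signed word -> signed byte conversion packsswb performs
 * (SaturateWordToByte is an illustrative helper, not part of IEM):
 *
 *      static int8_t SaturateWordToByte(int16_t i16)
 *      {
 *          if (i16 < INT8_MIN) return INT8_MIN;    // -32768..-129 -> -128
 *          if (i16 > INT8_MAX) return INT8_MAX;    //    128..32767 ->  127
 *          return (int8_t)i16;
 *      }
 *
 * packuswb (0x67 below) clamps the same signed words to the unsigned 0..255
 * range instead.
 */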
5994
5995/* Opcode 0xf3 0x0f 0x63 - invalid */
5996
5997
5998/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5999FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
6000{
6001 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6002 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
6003}
6004
6005
6006/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6007FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6008{
6009 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6010 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6011}
6012
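/* The pcmpgt/pcmpeq family produces all-ones/all-zeroes lane masks rather
 * than a flags result; per byte the signed greater-than compare is roughly:
 *
 *      puDst->au8[i] = (int8_t)puDst->au8[i] > (int8_t)puSrc->au8[i] ? 0xff : 0x00;
 *
 * which is what makes the result directly combinable with pand/pandn/por.
 */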
6013
6014/* Opcode 0xf3 0x0f 0x64 - invalid */
6015
6016
6017/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6018FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6019{
6020 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6021 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6022}
6023
6024
6025/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6026FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6027{
6028 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6029 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6030}
6031
6032
6033/* Opcode 0xf3 0x0f 0x65 - invalid */
6034
6035
6036/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6037FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6038{
6039 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6040 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6041}
6042
6043
6044/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6045FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6046{
6047 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6048 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6049}
6050
6051
6052/* Opcode 0xf3 0x0f 0x66 - invalid */
6053
6054
6055/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6056FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6057{
6058 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6059 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6060}
6061
6062
6063/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6064FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6065{
6066 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6067 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6068}
6069
6070
6071/* Opcode 0xf3 0x0f 0x67 - invalid */
6072
6073
6074/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6075 * @note Intel and AMD both use Qd for the second parameter, however they
6076 * both list it as an mmX/mem64 operand and Intel describes it as being
6077 * loaded as a qword, so it should be Qq, shouldn't it? */
6078FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6079{
6080 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6081 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6082}
6083
6084
6085/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6086FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6087{
6088 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6089 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6090}
6091
6092
6093/* Opcode 0xf3 0x0f 0x68 - invalid */
6094
6095
6096/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6097 * @note Intel and AMD both use Qd for the second parameter, however they
6098 * both list it as an mmX/mem64 operand and Intel describes it as being
6099 * loaded as a qword, so it should be Qq, shouldn't it? */
6100FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6101{
6102 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6103 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6104}
6105
6106
6107/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6108FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6109{
6110 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6111 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6112
6113}
6114
6115
6116/* Opcode 0xf3 0x0f 0x69 - invalid */
6117
6118
6119/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6120 * @note Intel and AMD both use Qd for the second parameter, however they
6121 * both list it as an mmX/mem64 operand and Intel describes it as being
6122 * loaded as a qword, so it should be Qq, shouldn't it? */
6123FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6124{
6125 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6126 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6127}
6128
6129
6130/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6131FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6132{
6133 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6134 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6135}
6136
6137
6138/* Opcode 0xf3 0x0f 0x6a - invalid */
6139
6140
6141/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6142FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6143{
6144 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6145 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6146}
6147
6148
6149/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6150FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6151{
6152 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6153 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6154}
6155
6156
6157/* Opcode 0xf3 0x0f 0x6b - invalid */
6158
6159
6160/* Opcode 0x0f 0x6c - invalid */
6161
6162
6163/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6164FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6165{
6166 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6167 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6168}
6169
6170
6171/* Opcode 0xf3 0x0f 0x6c - invalid */
6172/* Opcode 0xf2 0x0f 0x6c - invalid */
6173
6174
6175/* Opcode 0x0f 0x6d - invalid */
6176
6177
6178/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6179FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6180{
6181 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6182 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6183}
6184
6185
6186/* Opcode 0xf3 0x0f 0x6d - invalid */
6187
6188
6189FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6190{
6191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6192 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6193 {
6194 /**
6195 * @opcode 0x6e
6196 * @opcodesub rex.w=1
6197 * @oppfx none
6198 * @opcpuid mmx
6199 * @opgroup og_mmx_datamove
6200 * @opxcpttype 5
6201 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6202 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6203 */
6204 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6205 if (IEM_IS_MODRM_REG_MODE(bRm))
6206 {
6207 /* MMX, greg64 */
6208 IEM_MC_BEGIN(0, 1);
6209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6210 IEM_MC_LOCAL(uint64_t, u64Tmp);
6211
6212 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6213 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6214 IEM_MC_FPU_TO_MMX_MODE();
6215
6216 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6217 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6218
6219 IEM_MC_ADVANCE_RIP_AND_FINISH();
6220 IEM_MC_END();
6221 }
6222 else
6223 {
6224 /* MMX, [mem64] */
6225 IEM_MC_BEGIN(0, 2);
6226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6227 IEM_MC_LOCAL(uint64_t, u64Tmp);
6228
6229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6231 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6232 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6233 IEM_MC_FPU_TO_MMX_MODE();
6234
6235 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6236 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6237
6238 IEM_MC_ADVANCE_RIP_AND_FINISH();
6239 IEM_MC_END();
6240 }
6241 }
6242 else
6243 {
6244 /**
6245 * @opdone
6246 * @opcode 0x6e
6247 * @opcodesub rex.w=0
6248 * @oppfx none
6249 * @opcpuid mmx
6250 * @opgroup og_mmx_datamove
6251 * @opxcpttype 5
6252 * @opfunction iemOp_movd_q_Pd_Ey
6253 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6254 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6255 */
6256 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6257 if (IEM_IS_MODRM_REG_MODE(bRm))
6258 {
6259 /* MMX, greg32 */
6260 IEM_MC_BEGIN(0, 1);
6261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6262 IEM_MC_LOCAL(uint32_t, u32Tmp);
6263
6264 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6265 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6266 IEM_MC_FPU_TO_MMX_MODE();
6267
6268 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6269 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6270
6271 IEM_MC_ADVANCE_RIP_AND_FINISH();
6272 IEM_MC_END();
6273 }
6274 else
6275 {
6276 /* MMX, [mem32] */
6277 IEM_MC_BEGIN(0, 2);
6278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6279 IEM_MC_LOCAL(uint32_t, u32Tmp);
6280
6281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6283 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6284 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6285 IEM_MC_FPU_TO_MMX_MODE();
6286
6287 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6288 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6289
6290 IEM_MC_ADVANCE_RIP_AND_FINISH();
6291 IEM_MC_END();
6292 }
6293 }
6294}
6295
6296FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6297{
6298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6299 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6300 {
6301 /**
6302 * @opcode 0x6e
6303 * @opcodesub rex.w=1
6304 * @oppfx 0x66
6305 * @opcpuid sse2
6306 * @opgroup og_sse2_simdint_datamove
6307 * @opxcpttype 5
6308 * @optest 64-bit / op1=1 op2=2 -> op1=2
6309 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6310 */
6311 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6312 if (IEM_IS_MODRM_REG_MODE(bRm))
6313 {
6314 /* XMM, greg64 */
6315 IEM_MC_BEGIN(0, 1);
6316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6317 IEM_MC_LOCAL(uint64_t, u64Tmp);
6318
6319 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6320 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6321
6322 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6323 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6324
6325 IEM_MC_ADVANCE_RIP_AND_FINISH();
6326 IEM_MC_END();
6327 }
6328 else
6329 {
6330 /* XMM, [mem64] */
6331 IEM_MC_BEGIN(0, 2);
6332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6333 IEM_MC_LOCAL(uint64_t, u64Tmp);
6334
6335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6337 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6338 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6339
6340 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6341 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6342
6343 IEM_MC_ADVANCE_RIP_AND_FINISH();
6344 IEM_MC_END();
6345 }
6346 }
6347 else
6348 {
6349 /**
6350 * @opdone
6351 * @opcode 0x6e
6352 * @opcodesub rex.w=0
6353 * @oppfx 0x66
6354 * @opcpuid sse2
6355 * @opgroup og_sse2_simdint_datamove
6356 * @opxcpttype 5
6357 * @opfunction iemOp_movd_q_Vy_Ey
6358 * @optest op1=1 op2=2 -> op1=2
6359 * @optest op1=0 op2=-42 -> op1=-42
6360 */
6361 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6362 if (IEM_IS_MODRM_REG_MODE(bRm))
6363 {
6364 /* XMM, greg32 */
6365 IEM_MC_BEGIN(0, 1);
6366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6367 IEM_MC_LOCAL(uint32_t, u32Tmp);
6368
6369 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6370 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6371
6372 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6373 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6374
6375 IEM_MC_ADVANCE_RIP_AND_FINISH();
6376 IEM_MC_END();
6377 }
6378 else
6379 {
6380 /* XMM, [mem32] */
6381 IEM_MC_BEGIN(0, 2);
6382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6383 IEM_MC_LOCAL(uint32_t, u32Tmp);
6384
6385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6387 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6388 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6389
6390 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6391 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6392
6393 IEM_MC_ADVANCE_RIP_AND_FINISH();
6394 IEM_MC_END();
6395 }
6396 }
6397}
6398
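/* Summary of the two 0x6e decoders above: with REX.W the full 64 bits are
 * moved (movq), without it the 32-bit source is zero-extended - to 64 bits
 * in the MMX register and to all 128 bits in the XMM one.  Roughly:
 *
 *      mm          = (uint64_t)u32Src;             // movd Pd,Ed
 *      xmm.au64[0] = u32Src; xmm.au64[1] = 0;      // movd Vd,Ed
 *
 * (sketch only; the zero extension is done by the *_ZX_* microcode ops).
 */
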
6399/* Opcode 0xf3 0x0f 0x6e - invalid */
6400
6401
6402/**
6403 * @opcode 0x6f
6404 * @oppfx none
6405 * @opcpuid mmx
6406 * @opgroup og_mmx_datamove
6407 * @opxcpttype 5
6408 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6409 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6410 */
6411FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6412{
6413 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
6414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6415 if (IEM_IS_MODRM_REG_MODE(bRm))
6416 {
6417 /*
6418 * Register, register.
6419 */
6420 IEM_MC_BEGIN(0, 1);
6421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6422 IEM_MC_LOCAL(uint64_t, u64Tmp);
6423
6424 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6425 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6426 IEM_MC_FPU_TO_MMX_MODE();
6427
6428 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6429 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6430
6431 IEM_MC_ADVANCE_RIP_AND_FINISH();
6432 IEM_MC_END();
6433 }
6434 else
6435 {
6436 /*
6437 * Register, memory.
6438 */
6439 IEM_MC_BEGIN(0, 2);
6440 IEM_MC_LOCAL(uint64_t, u64Tmp);
6441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6442
6443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6445 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6446 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6447 IEM_MC_FPU_TO_MMX_MODE();
6448
6449 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6450 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6451
6452 IEM_MC_ADVANCE_RIP_AND_FINISH();
6453 IEM_MC_END();
6454 }
6455}
6456
6457/**
6458 * @opcode 0x6f
6459 * @oppfx 0x66
6460 * @opcpuid sse2
6461 * @opgroup og_sse2_simdint_datamove
6462 * @opxcpttype 1
6463 * @optest op1=1 op2=2 -> op1=2
6464 * @optest op1=0 op2=-42 -> op1=-42
6465 */
6466FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6467{
6468 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6470 if (IEM_IS_MODRM_REG_MODE(bRm))
6471 {
6472 /*
6473 * Register, register.
6474 */
6475 IEM_MC_BEGIN(0, 0);
6476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6477
6478 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6479 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6480
6481 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6482 IEM_GET_MODRM_RM(pVCpu, bRm));
6483 IEM_MC_ADVANCE_RIP_AND_FINISH();
6484 IEM_MC_END();
6485 }
6486 else
6487 {
6488 /*
6489 * Register, memory.
6490 */
6491 IEM_MC_BEGIN(0, 2);
6492 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6494
6495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6497 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6499
6500 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6501 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6502
6503 IEM_MC_ADVANCE_RIP_AND_FINISH();
6504 IEM_MC_END();
6505 }
6506}
6507
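/* The only behavioral difference from movdqu (next function) is alignment:
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces the 16-byte requirement, roughly:
 *
 *      if (GCPtrEffSrc & 15)
 *          return iemRaiseGeneralProtectionFault0(pVCpu);  // movdqa: #GP(0)
 *
 * while the plain IEM_MC_FETCH_MEM_U128 used by movdqu below accepts any
 * address.
 */
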
6508/**
6509 * @opcode 0x6f
6510 * @oppfx 0xf3
6511 * @opcpuid sse2
6512 * @opgroup og_sse2_simdint_datamove
6513 * @opxcpttype 4UA
6514 * @optest op1=1 op2=2 -> op1=2
6515 * @optest op1=0 op2=-42 -> op1=-42
6516 */
6517FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6518{
6519 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6521 if (IEM_IS_MODRM_REG_MODE(bRm))
6522 {
6523 /*
6524 * Register, register.
6525 */
6526 IEM_MC_BEGIN(0, 0);
6527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6528 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6529 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6530 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6531 IEM_GET_MODRM_RM(pVCpu, bRm));
6532 IEM_MC_ADVANCE_RIP_AND_FINISH();
6533 IEM_MC_END();
6534 }
6535 else
6536 {
6537 /*
6538 * Register, memory.
6539 */
6540 IEM_MC_BEGIN(0, 2);
6541 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6543
6544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6546 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6547 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6548 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6549 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6550
6551 IEM_MC_ADVANCE_RIP_AND_FINISH();
6552 IEM_MC_END();
6553 }
6554}
6555
6556
6557/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6558FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6559{
6560 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6562 if (IEM_IS_MODRM_REG_MODE(bRm))
6563 {
6564 /*
6565 * Register, register.
6566 */
6567 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6568 IEM_MC_BEGIN(3, 0);
6569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6570 IEM_MC_ARG(uint64_t *, pDst, 0);
6571 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6572 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6573 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6574 IEM_MC_PREPARE_FPU_USAGE();
6575 IEM_MC_FPU_TO_MMX_MODE();
6576
6577 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6578 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6579 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6580 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6581
6582 IEM_MC_ADVANCE_RIP_AND_FINISH();
6583 IEM_MC_END();
6584 }
6585 else
6586 {
6587 /*
6588 * Register, memory.
6589 */
6590 IEM_MC_BEGIN(3, 2);
6591 IEM_MC_ARG(uint64_t *, pDst, 0);
6592 IEM_MC_LOCAL(uint64_t, uSrc);
6593 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6595
6596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6597 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6598 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6600 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6601 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6602
6603 IEM_MC_PREPARE_FPU_USAGE();
6604 IEM_MC_FPU_TO_MMX_MODE();
6605
6606 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6607 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6608 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6609
6610 IEM_MC_ADVANCE_RIP_AND_FINISH();
6611 IEM_MC_END();
6612 }
6613}
6614
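/* pshufw selects each destination word with two bits of the immediate.
 * Standalone sketch of the operation (MyPshufW is an illustrative helper,
 * not the iemAImpl worker):
 *
 *      static uint64_t MyPshufW(uint64_t uSrc, uint8_t bImm)
 *      {
 *          uint64_t uDst = 0;
 *          for (unsigned i = 0; i < 4; i++)
 *          {
 *              unsigned const iSel = (bImm >> (i * 2)) & 3;
 *              uDst |= ((uSrc >> (iSel * 16)) & 0xffff) << (i * 16);
 *          }
 *          return uDst;
 *      }
 *
 * pshufd does the same with the four dwords of an XMM register, while
 * pshuflw/pshufhw shuffle one 64-bit half and copy the other unchanged.
 */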
6615
6616/**
6617 * Common worker for SSE2 instructions on the forms:
6618 * pshufd xmm1, xmm2/mem128, imm8
6619 * pshufhw xmm1, xmm2/mem128, imm8
6620 * pshuflw xmm1, xmm2/mem128, imm8
6621 *
6622 * Proper alignment of the 128-bit operand is enforced.
6623 * Exceptions type 4. SSE2 cpuid checks.
6624 */
6625FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6626{
6627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6628 if (IEM_IS_MODRM_REG_MODE(bRm))
6629 {
6630 /*
6631 * Register, register.
6632 */
6633 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6634 IEM_MC_BEGIN(3, 0);
6635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6636 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6637 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6638 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6639 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6640 IEM_MC_PREPARE_SSE_USAGE();
6641 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6642 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6643 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6644 IEM_MC_ADVANCE_RIP_AND_FINISH();
6645 IEM_MC_END();
6646 }
6647 else
6648 {
6649 /*
6650 * Register, memory.
6651 */
6652 IEM_MC_BEGIN(3, 2);
6653 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6654 IEM_MC_LOCAL(RTUINT128U, uSrc);
6655 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6657
6658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6659 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6660 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6662 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6663
6664 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6665 IEM_MC_PREPARE_SSE_USAGE();
6666 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6667 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6668
6669 IEM_MC_ADVANCE_RIP_AND_FINISH();
6670 IEM_MC_END();
6671 }
6672}
6673
6674
6675/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6676FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6677{
6678 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6679 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6680}
6681
6682
6683/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6684FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6685{
6686 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6687 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6688}
6689
6690
6691/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6692FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6693{
6694 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6695 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6696}
6697
6698
6699/**
6700 * Common worker for MMX instructions of the form:
6701 * psrlw mm, imm8
6702 * psraw mm, imm8
6703 * psllw mm, imm8
6704 * psrld mm, imm8
6705 * psrad mm, imm8
6706 * pslld mm, imm8
6707 * psrlq mm, imm8
6708 * psllq mm, imm8
6709 *
6710 */
6711FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6712{
6713 if (IEM_IS_MODRM_REG_MODE(bRm))
6714 {
6715 /*
6716 * Register, immediate.
6717 */
6718 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6719 IEM_MC_BEGIN(2, 0);
6720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6721 IEM_MC_ARG(uint64_t *, pDst, 0);
6722 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6723 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6724 IEM_MC_PREPARE_FPU_USAGE();
6725 IEM_MC_FPU_TO_MMX_MODE();
6726
6727 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6728 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6729 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6730
6731 IEM_MC_ADVANCE_RIP_AND_FINISH();
6732 IEM_MC_END();
6733 }
6734 else
6735 {
6736 /*
6737 * Register, memory not supported.
6738 */
6739 /// @todo Caller already enforced register mode?!
6740 AssertFailedReturn(VINF_SUCCESS);
6741 }
6742}
6743
6744
6745/**
6746 * Common worker for SSE2 instructions of the form:
6747 * psrlw xmm, imm8
6748 * psraw xmm, imm8
6749 * psllw xmm, imm8
6750 * psrld xmm, imm8
6751 * psrad xmm, imm8
6752 * pslld xmm, imm8
6753 * psrlq xmm, imm8
6754 * psllq xmm, imm8
6755 *
6756 */
6757FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6758{
6759 if (IEM_IS_MODRM_REG_MODE(bRm))
6760 {
6761 /*
6762 * Register, immediate.
6763 */
6764 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6765 IEM_MC_BEGIN(2, 0);
6766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6767 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6768 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6769 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6770 IEM_MC_PREPARE_SSE_USAGE();
6771 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6772 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6773 IEM_MC_ADVANCE_RIP_AND_FINISH();
6774 IEM_MC_END();
6775 }
6776 else
6777 {
6778 /*
6779 * Register, memory not supported.
6780 */
6781 /// @todo Caller already enforced register mode?!
6782 AssertFailedReturn(VINF_SUCCESS);
6783 }
6784}
6785
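/* For all these immediate shifts the count is not masked: a count larger than
 * the element width zeroes the element, except that the arithmetic psra{w,d}
 * forms saturate the count and fill with the sign bit.  Per 16-bit lane,
 * roughly (illustrative helpers, not the iemAImpl workers):
 *
 *      static uint16_t PsrlwLane(uint16_t u16, uint8_t cShift)
 *      {
 *          return cShift <= 15 ? (uint16_t)(u16 >> cShift) : 0;
 *      }
 *      static int16_t PsrawLane(int16_t i16, uint8_t cShift)
 *      {
 *          return (int16_t)(i16 >> (cShift <= 15 ? cShift : 15));
 *      }
 */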
6786
6787/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6788FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6789{
6790// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6791 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6792}
6793
6794
6795/** Opcode 0x66 0x0f 0x71 11/2. */
6796FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6797{
6798// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6799 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6800}
6801
6802
6803/** Opcode 0x0f 0x71 11/4. */
6804FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6805{
6806// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6807 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6808}
6809
6810
6811/** Opcode 0x66 0x0f 0x71 11/4. */
6812FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6813{
6814// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6815 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6816}
6817
6818
6819/** Opcode 0x0f 0x71 11/6. */
6820FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6821{
6822// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6823 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6824}
6825
6826
6827/** Opcode 0x66 0x0f 0x71 11/6. */
6828FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6829{
6830// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6831 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6832}
6833
6834
6835/**
6836 * Group 12 jump table for register variant.
6837 */
6838IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6839{
6840 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6841 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6842 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6843 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6844 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6845 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6846 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6847 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6848};
6849AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6850
6851
6852/** Opcode 0x0f 0x71. */
6853FNIEMOP_DEF(iemOp_Grp12)
6854{
6855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6856 if (IEM_IS_MODRM_REG_MODE(bRm))
6857 /* register, register */
6858 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6859 + pVCpu->iem.s.idxPrefix], bRm);
6860 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6861}
6862
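/* All three shift groups (12/13/14) dispatch identically: one table row per
 * ModR/M reg value, four columns for the active prefix in idxPrefix order
 * (none, 0x66, 0xf3, 0xf2).  Worked example:
 *
 *      66 0F 71 /2 ib  ->  reg=2, idxPrefix=1 (0x66)
 *                      ->  g_apfnGroup12RegReg[2 * 4 + 1] = iemOp_Grp12_psrlw_Ux_Ib
 *
 * The memory forms all route to iemOp_InvalidWithRMNeedImm8 since these
 * encodings only ever shift registers.
 */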
6863
6864/** Opcode 0x0f 0x72 11/2. */
6865FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6866{
6867// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6868 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6869}
6870
6871
6872/** Opcode 0x66 0x0f 0x72 11/2. */
6873FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6874{
6875// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6876 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6877}
6878
6879
6880/** Opcode 0x0f 0x72 11/4. */
6881FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6882{
6883// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6884 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6885}
6886
6887
6888/** Opcode 0x66 0x0f 0x72 11/4. */
6889FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6890{
6891// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6892 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6893}
6894
6895
6896/** Opcode 0x0f 0x72 11/6. */
6897FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6898{
6899// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6900 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6901}
6902
6903/** Opcode 0x66 0x0f 0x72 11/6. */
6904FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6905{
6906// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6907 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6908}
6909
6910
6911/**
6912 * Group 13 jump table for register variant.
6913 */
6914IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6915{
6916 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6917 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6918 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6919 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6920 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6921 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6922 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6923 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6924};
6925AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6926
6927/** Opcode 0x0f 0x72. */
6928FNIEMOP_DEF(iemOp_Grp13)
6929{
6930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6931 if (IEM_IS_MODRM_REG_MODE(bRm))
6932 /* register, register */
6933 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6934 + pVCpu->iem.s.idxPrefix], bRm);
6935 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6936}
6937
6938
6939/** Opcode 0x0f 0x73 11/2. */
6940FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6941{
6942// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6943 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6944}
6945
6946
6947/** Opcode 0x66 0x0f 0x73 11/2. */
6948FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6949{
6950// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6951 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6952}
6953
6954
6955/** Opcode 0x66 0x0f 0x73 11/3. */
6956FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6957{
6958// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6959 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6960}
6961
6962
6963/** Opcode 0x0f 0x73 11/6. */
6964FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6965{
6966// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6967 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6968}
6969
6970
6971/** Opcode 0x66 0x0f 0x73 11/6. */
6972FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6973{
6974// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6975 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6976}
6977
6978
6979/** Opcode 0x66 0x0f 0x73 11/7. */
6980FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6981{
6982// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6983 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6984}
6985
6986/**
6987 * Group 14 jump table for register variant.
6988 */
6989IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6990{
6991 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6992 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6993 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6994 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6995 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6996 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6997 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6998 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6999};
7000AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
7001
7002
7003/** Opcode 0x0f 0x73. */
7004FNIEMOP_DEF(iemOp_Grp14)
7005{
7006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7007 if (IEM_IS_MODRM_REG_MODE(bRm))
7008 /* register, register */
7009 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7010 + pVCpu->iem.s.idxPrefix], bRm);
7011 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7012}
7013
7014
7015/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7016FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7017{
7018 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7019 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7020}
7021
7022
7023/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7024FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7025{
7026 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7027 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7028}
7029
7030
7031/* Opcode 0xf3 0x0f 0x74 - invalid */
7032/* Opcode 0xf2 0x0f 0x74 - invalid */
7033
7034
7035/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7036FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7037{
7038 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7039 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7040}
7041
7042
7043/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7044FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7045{
7046 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7047 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7048}
7049
7050
7051/* Opcode 0xf3 0x0f 0x75 - invalid */
7052/* Opcode 0xf2 0x0f 0x75 - invalid */
7053
7054
7055/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7056FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7057{
7058 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7059 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7060}
7061
7062
7063/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7064FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7065{
7066 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7067 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7068}
7069
7070
7071/* Opcode 0xf3 0x0f 0x76 - invalid */
7072/* Opcode 0xf2 0x0f 0x76 - invalid */
7073
7074
7075/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
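/*
 * EMMS tags all eight x87 registers as empty and leaves MMX mode, so
 * subsequent x87 code starts out with a clean register stack.
 */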
7076FNIEMOP_DEF(iemOp_emms)
7077{
7078 IEMOP_MNEMONIC(emms, "emms");
7079 IEM_MC_BEGIN(0, 0);
7080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7081 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7082 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7083 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7084 IEM_MC_FPU_FROM_MMX_MODE();
7085 IEM_MC_ADVANCE_RIP_AND_FINISH();
7086 IEM_MC_END();
7087}
7088
7089/* Opcode 0x66 0x0f 0x77 - invalid */
7090/* Opcode 0xf3 0x0f 0x77 - invalid */
7091/* Opcode 0xf2 0x0f 0x77 - invalid */
7092
7093/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7094#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7095FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7096{
7097 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7098 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7099 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
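    /* Note: VMREAD disregards operand-size prefixes; the effective operand
       size is 64-bit in long mode and 32-bit everywhere else. */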
7100 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7101
7102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7103 if (IEM_IS_MODRM_REG_MODE(bRm))
7104 {
7105 /*
7106 * Register, register.
7107 */
7108 if (enmEffOpSize == IEMMODE_64BIT)
7109 {
7110 IEM_MC_BEGIN(2, 0);
7111 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7112 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7113 IEM_MC_ARG(uint64_t, u64Enc, 1);
7114 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7115 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7116 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7117 IEM_MC_END();
7118 }
7119 else
7120 {
7121 IEM_MC_BEGIN(2, 0);
7122 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7123 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7124 IEM_MC_ARG(uint32_t, u32Enc, 1);
7125 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7126 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7127 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
7128 IEM_MC_END();
7129 }
7130 }
7131 else
7132 {
7133 /*
7134 * Memory, register.
7135 */
7136 if (enmEffOpSize == IEMMODE_64BIT)
7137 {
7138 IEM_MC_BEGIN(3, 0);
7139 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7140 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7141 IEM_MC_ARG(uint64_t, u64Enc, 2);
7142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7143 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7144 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7145 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7146 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7147 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7148 IEM_MC_END();
7149 }
7150 else
7151 {
7152 IEM_MC_BEGIN(3, 0);
7153 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7154 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7155 IEM_MC_ARG(uint32_t, u32Enc, 2);
7156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7157 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7158 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7159 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7160 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7161 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7162 IEM_MC_END();
7163 }
7164 }
7165}
7166#else
7167FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7168#endif
7169
7170/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7171FNIEMOP_STUB(iemOp_AmdGrp17);
7172/* Opcode 0xf3 0x0f 0x78 - invalid */
7173/* Opcode 0xf2 0x0f 0x78 - invalid */
7174
7175/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7176#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7177FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7178{
7179 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7180 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7181 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
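    /* Same operand-size rule as VMREAD above: 64-bit in long mode, else 32-bit. */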
7182 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7183
7184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7185 if (IEM_IS_MODRM_REG_MODE(bRm))
7186 {
7187 /*
7188 * Register, register.
7189 */
7190 if (enmEffOpSize == IEMMODE_64BIT)
7191 {
7192 IEM_MC_BEGIN(2, 0);
7193 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7194 IEM_MC_ARG(uint64_t, u64Val, 0);
7195 IEM_MC_ARG(uint64_t, u64Enc, 1);
7196 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7197 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7198 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7199 IEM_MC_END();
7200 }
7201 else
7202 {
7203 IEM_MC_BEGIN(2, 0);
7204 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7205 IEM_MC_ARG(uint32_t, u32Val, 0);
7206 IEM_MC_ARG(uint32_t, u32Enc, 1);
7207 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7208 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7209 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7210 IEM_MC_END();
7211 }
7212 }
7213 else
7214 {
7215 /*
7216 * Register, memory.
7217 */
7218 if (enmEffOpSize == IEMMODE_64BIT)
7219 {
7220 IEM_MC_BEGIN(3, 0);
7221 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7222 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7223 IEM_MC_ARG(uint64_t, u64Enc, 2);
7224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7225 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7226 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7227 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7228 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7229 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7230 IEM_MC_END();
7231 }
7232 else
7233 {
7234 IEM_MC_BEGIN(3, 0);
7235 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7236 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7237 IEM_MC_ARG(uint32_t, u32Enc, 2);
7238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7239 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7240 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7241 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7242 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7243 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7244 IEM_MC_END();
7245 }
7246 }
7247}
7248#else
7249FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7250#endif
7251/* Opcode 0x66 0x0f 0x79 - invalid */
7252/* Opcode 0xf3 0x0f 0x79 - invalid */
7253/* Opcode 0xf2 0x0f 0x79 - invalid */
7254
7255/* Opcode 0x0f 0x7a - invalid */
7256/* Opcode 0x66 0x0f 0x7a - invalid */
7257/* Opcode 0xf3 0x0f 0x7a - invalid */
7258/* Opcode 0xf2 0x0f 0x7a - invalid */
7259
7260/* Opcode 0x0f 0x7b - invalid */
7261/* Opcode 0x66 0x0f 0x7b - invalid */
7262/* Opcode 0xf3 0x0f 0x7b - invalid */
7263/* Opcode 0xf2 0x0f 0x7b - invalid */
7264
7265/* Opcode 0x0f 0x7c - invalid */
7266
7267
7268/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7269FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7270{
7271 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7272 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7273}
7274
7275
7276/* Opcode 0xf3 0x0f 0x7c - invalid */
7277
7278
7279/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7280FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7281{
7282 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7283 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7284}
7285
7286
7287/* Opcode 0x0f 0x7d - invalid */
7288
7289
7290/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7291FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7292{
7293 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7294 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7295}
7296
7297
7298/* Opcode 0xf3 0x0f 0x7d - invalid */
7299
7300
7301/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7302FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7303{
7304 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7305 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7306}
7307
7308
7309/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7310FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7311{
7312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7313 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7314 {
7315 /**
7316 * @opcode 0x7e
7317 * @opcodesub rex.w=1
7318 * @oppfx none
7319 * @opcpuid mmx
7320 * @opgroup og_mmx_datamove
7321 * @opxcpttype 5
7322 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7323 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7324 */
7325 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7326 if (IEM_IS_MODRM_REG_MODE(bRm))
7327 {
7328 /* greg64, MMX */
7329 IEM_MC_BEGIN(0, 1);
7330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7331 IEM_MC_LOCAL(uint64_t, u64Tmp);
7332
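            /* Even this register read is an MMX instruction: it clears TOP
               and marks the whole x87 tag word as valid. */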
7333 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7334 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7335 IEM_MC_FPU_TO_MMX_MODE();
7336
7337 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7338 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7339
7340 IEM_MC_ADVANCE_RIP_AND_FINISH();
7341 IEM_MC_END();
7342 }
7343 else
7344 {
7345 /* [mem64], MMX */
7346 IEM_MC_BEGIN(0, 2);
7347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7348 IEM_MC_LOCAL(uint64_t, u64Tmp);
7349
7350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7352 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7353 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7354 IEM_MC_FPU_TO_MMX_MODE();
7355
7356 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7357 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7358
7359 IEM_MC_ADVANCE_RIP_AND_FINISH();
7360 IEM_MC_END();
7361 }
7362 }
7363 else
7364 {
7365 /**
7366 * @opdone
7367 * @opcode 0x7e
7368 * @opcodesub rex.w=0
7369 * @oppfx none
7370 * @opcpuid mmx
7371 * @opgroup og_mmx_datamove
7372 * @opxcpttype 5
7373 * @opfunction iemOp_movd_q_Ey_Pd
7374 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7375 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7376 */
7377 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7378 if (IEM_IS_MODRM_REG_MODE(bRm))
7379 {
7380 /* greg32, MMX */
7381 IEM_MC_BEGIN(0, 1);
7382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7383 IEM_MC_LOCAL(uint32_t, u32Tmp);
7384
7385 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7386 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7387 IEM_MC_FPU_TO_MMX_MODE();
7388
7389 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7390 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7391
7392 IEM_MC_ADVANCE_RIP_AND_FINISH();
7393 IEM_MC_END();
7394 }
7395 else
7396 {
7397 /* [mem32], MMX */
7398 IEM_MC_BEGIN(0, 2);
7399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7400 IEM_MC_LOCAL(uint32_t, u32Tmp);
7401
7402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7404 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7405 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7406 IEM_MC_FPU_TO_MMX_MODE();
7407
7408 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7409 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7410
7411 IEM_MC_ADVANCE_RIP_AND_FINISH();
7412 IEM_MC_END();
7413 }
7414 }
7415}
7416
7417
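/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */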
7418FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7419{
7420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7421 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7422 {
7423 /**
7424 * @opcode 0x7e
7425 * @opcodesub rex.w=1
7426 * @oppfx 0x66
7427 * @opcpuid sse2
7428 * @opgroup og_sse2_simdint_datamove
7429 * @opxcpttype 5
7430 * @optest 64-bit / op1=1 op2=2 -> op1=2
7431 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7432 */
7433 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7434 if (IEM_IS_MODRM_REG_MODE(bRm))
7435 {
7436 /* greg64, XMM */
7437 IEM_MC_BEGIN(0, 1);
7438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7439 IEM_MC_LOCAL(uint64_t, u64Tmp);
7440
7441 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7442 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7443
7444 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword */);
7445 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7446
7447 IEM_MC_ADVANCE_RIP_AND_FINISH();
7448 IEM_MC_END();
7449 }
7450 else
7451 {
7452 /* [mem64], XMM */
7453 IEM_MC_BEGIN(0, 2);
7454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7455 IEM_MC_LOCAL(uint64_t, u64Tmp);
7456
7457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7459 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7460 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7461
7462 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword */);
7463 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7464
7465 IEM_MC_ADVANCE_RIP_AND_FINISH();
7466 IEM_MC_END();
7467 }
7468 }
7469 else
7470 {
7471 /**
7472 * @opdone
7473 * @opcode 0x7e
7474 * @opcodesub rex.w=0
7475 * @oppfx 0x66
7476 * @opcpuid sse2
7477 * @opgroup og_sse2_simdint_datamove
7478 * @opxcpttype 5
7479 * @opfunction iemOp_movd_q_Ey_Vy
7480 * @optest op1=1 op2=2 -> op1=2
7481 * @optest op1=0 op2=-42 -> op1=-42
7482 */
7483 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7484 if (IEM_IS_MODRM_REG_MODE(bRm))
7485 {
7486 /* greg32, XMM */
7487 IEM_MC_BEGIN(0, 1);
7488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7489 IEM_MC_LOCAL(uint32_t, u32Tmp);
7490
7491 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7492 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7493
7494 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iDword */);
7495 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7496
7497 IEM_MC_ADVANCE_RIP_AND_FINISH();
7498 IEM_MC_END();
7499 }
7500 else
7501 {
7502 /* [mem32], XMM */
7503 IEM_MC_BEGIN(0, 2);
7504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7505 IEM_MC_LOCAL(uint32_t, u32Tmp);
7506
7507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7509 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7510 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7511
7512 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iDword */);
7513 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7514
7515 IEM_MC_ADVANCE_RIP_AND_FINISH();
7516 IEM_MC_END();
7517 }
7518 }
7519}
7520
7521/**
7522 * @opcode 0x7e
7523 * @oppfx 0xf3
7524 * @opcpuid sse2
7525 * @opgroup og_sse2_pcksclr_datamove
7526 * @opxcpttype none
7527 * @optest op1=1 op2=2 -> op1=2
7528 * @optest op1=0 op2=-42 -> op1=-42
7529 */
7530FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7531{
7532 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7534 if (IEM_IS_MODRM_REG_MODE(bRm))
7535 {
7536 /*
7537 * XMM128, XMM64.
7538 */
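        /* MOVQ zero-extends the 64-bit source into the full 128-bit destination. */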
7539 IEM_MC_BEGIN(0, 2);
7540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7541 IEM_MC_LOCAL(uint64_t, uSrc);
7542
7543 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7544 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7545
7546 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */);
7547 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7548
7549 IEM_MC_ADVANCE_RIP_AND_FINISH();
7550 IEM_MC_END();
7551 }
7552 else
7553 {
7554 /*
7555 * XMM128, [mem64].
7556 */
7557 IEM_MC_BEGIN(0, 2);
7558 IEM_MC_LOCAL(uint64_t, uSrc);
7559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7560
7561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7563 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7564 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7565
7566 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7567 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7568
7569 IEM_MC_ADVANCE_RIP_AND_FINISH();
7570 IEM_MC_END();
7571 }
7572}
7573
7574/* Opcode 0xf2 0x0f 0x7e - invalid */
7575
7576
7577/** Opcode 0x0f 0x7f - movq Qq, Pq */
7578FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7579{
7580 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7582 if (IEM_IS_MODRM_REG_MODE(bRm))
7583 {
7584 /*
7585 * MMX, MMX.
7586 */
7587 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7588 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7589 IEM_MC_BEGIN(0, 1);
7590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7591 IEM_MC_LOCAL(uint64_t, u64Tmp);
7592 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7593 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7594 IEM_MC_FPU_TO_MMX_MODE();
7595
7596 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7597 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7598
7599 IEM_MC_ADVANCE_RIP_AND_FINISH();
7600 IEM_MC_END();
7601 }
7602 else
7603 {
7604 /*
7605 * [mem64], MMX.
7606 */
7607 IEM_MC_BEGIN(0, 2);
7608 IEM_MC_LOCAL(uint64_t, u64Tmp);
7609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7610
7611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7613 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7614 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7615 IEM_MC_FPU_TO_MMX_MODE();
7616
7617 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7618 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7619
7620 IEM_MC_ADVANCE_RIP_AND_FINISH();
7621 IEM_MC_END();
7622 }
7623}
7624
7625/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
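/* Note: unlike movdqu below, the memory form stores with
   IEM_MC_STORE_MEM_U128_ALIGN_SSE and so faults on misaligned operands. */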
7626FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7627{
7628 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7630 if (IEM_IS_MODRM_REG_MODE(bRm))
7631 {
7632 /*
7633 * XMM, XMM.
7634 */
7635 IEM_MC_BEGIN(0, 0);
7636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7637 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7638 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7639 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7640 IEM_GET_MODRM_REG(pVCpu, bRm));
7641 IEM_MC_ADVANCE_RIP_AND_FINISH();
7642 IEM_MC_END();
7643 }
7644 else
7645 {
7646 /*
7647 * [mem128], XMM.
7648 */
7649 IEM_MC_BEGIN(0, 2);
7650 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7652
7653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7655 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7656 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7657
7658 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7659 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7660
7661 IEM_MC_ADVANCE_RIP_AND_FINISH();
7662 IEM_MC_END();
7663 }
7664}
7665
7666/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7667FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7668{
7669 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7671 if (IEM_IS_MODRM_REG_MODE(bRm))
7672 {
7673 /*
7674 * XMM, XMM.
7675 */
7676 IEM_MC_BEGIN(0, 0);
7677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7678 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7679 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7680 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7681 IEM_GET_MODRM_REG(pVCpu, bRm));
7682 IEM_MC_ADVANCE_RIP_AND_FINISH();
7683 IEM_MC_END();
7684 }
7685 else
7686 {
7687 /*
7688 * [mem128], XMM.
7689 */
7690 IEM_MC_BEGIN(0, 2);
7691 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7693
7694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7696 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7697 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7698
7699 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7700 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7701
7702 IEM_MC_ADVANCE_RIP_AND_FINISH();
7703 IEM_MC_END();
7704 }
7705}
7706
7707/* Opcode 0xf2 0x0f 0x7f - invalid */
7708
7709
7710
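/*
 * The long form Jcc instructions (0x0f 0x80 thru 0x8f) below all follow the
 * same pattern: fetch a signed 16-bit or 32-bit displacement according to
 * the effective operand size, test the EFLAGS condition, and then either
 * take the relative jump or just advance RIP past the instruction.
 */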
7711/** Opcode 0x0f 0x80. */
7712FNIEMOP_DEF(iemOp_jo_Jv)
7713{
7714 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7715 IEMOP_HLP_MIN_386();
7716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7717 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7718 {
7719 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7720 IEM_MC_BEGIN(0, 0);
7721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7722 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7723 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7724 } IEM_MC_ELSE() {
7725 IEM_MC_ADVANCE_RIP_AND_FINISH();
7726 } IEM_MC_ENDIF();
7727 IEM_MC_END();
7728 }
7729 else
7730 {
7731 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7732 IEM_MC_BEGIN(0, 0);
7733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7735 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7736 } IEM_MC_ELSE() {
7737 IEM_MC_ADVANCE_RIP_AND_FINISH();
7738 } IEM_MC_ENDIF();
7739 IEM_MC_END();
7740 }
7741}
7742
7743
7744/** Opcode 0x0f 0x81. */
7745FNIEMOP_DEF(iemOp_jno_Jv)
7746{
7747 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7748 IEMOP_HLP_MIN_386();
7749 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7750 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7751 {
7752 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7753 IEM_MC_BEGIN(0, 0);
7754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7755 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7756 IEM_MC_ADVANCE_RIP_AND_FINISH();
7757 } IEM_MC_ELSE() {
7758 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7759 } IEM_MC_ENDIF();
7760 IEM_MC_END();
7761 }
7762 else
7763 {
7764 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7765 IEM_MC_BEGIN(0, 0);
7766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7767 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7768 IEM_MC_ADVANCE_RIP_AND_FINISH();
7769 } IEM_MC_ELSE() {
7770 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7771 } IEM_MC_ENDIF();
7772 IEM_MC_END();
7773 }
7774}
7775
7776
7777/** Opcode 0x0f 0x82. */
7778FNIEMOP_DEF(iemOp_jc_Jv)
7779{
7780 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7781 IEMOP_HLP_MIN_386();
7782 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7783 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7784 {
7785 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7786 IEM_MC_BEGIN(0, 0);
7787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7788 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7789 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7790 } IEM_MC_ELSE() {
7791 IEM_MC_ADVANCE_RIP_AND_FINISH();
7792 } IEM_MC_ENDIF();
7793 IEM_MC_END();
7794 }
7795 else
7796 {
7797 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7798 IEM_MC_BEGIN(0, 0);
7799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7800 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7801 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7802 } IEM_MC_ELSE() {
7803 IEM_MC_ADVANCE_RIP_AND_FINISH();
7804 } IEM_MC_ENDIF();
7805 IEM_MC_END();
7806 }
7807}
7808
7809
7810/** Opcode 0x0f 0x83. */
7811FNIEMOP_DEF(iemOp_jnc_Jv)
7812{
7813 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7814 IEMOP_HLP_MIN_386();
7815 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7816 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7817 {
7818 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7819 IEM_MC_BEGIN(0, 0);
7820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7821 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7822 IEM_MC_ADVANCE_RIP_AND_FINISH();
7823 } IEM_MC_ELSE() {
7824 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7825 } IEM_MC_ENDIF();
7826 IEM_MC_END();
7827 }
7828 else
7829 {
7830 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7831 IEM_MC_BEGIN(0, 0);
7832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7833 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7834 IEM_MC_ADVANCE_RIP_AND_FINISH();
7835 } IEM_MC_ELSE() {
7836 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7837 } IEM_MC_ENDIF();
7838 IEM_MC_END();
7839 }
7840}
7841
7842
7843/** Opcode 0x0f 0x84. */
7844FNIEMOP_DEF(iemOp_je_Jv)
7845{
7846 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7847 IEMOP_HLP_MIN_386();
7848 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7849 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7850 {
7851 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7852 IEM_MC_BEGIN(0, 0);
7853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7854 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7855 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7856 } IEM_MC_ELSE() {
7857 IEM_MC_ADVANCE_RIP_AND_FINISH();
7858 } IEM_MC_ENDIF();
7859 IEM_MC_END();
7860 }
7861 else
7862 {
7863 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7864 IEM_MC_BEGIN(0, 0);
7865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7866 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7867 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7868 } IEM_MC_ELSE() {
7869 IEM_MC_ADVANCE_RIP_AND_FINISH();
7870 } IEM_MC_ENDIF();
7871 IEM_MC_END();
7872 }
7873}
7874
7875
7876/** Opcode 0x0f 0x85. */
7877FNIEMOP_DEF(iemOp_jne_Jv)
7878{
7879 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7880 IEMOP_HLP_MIN_386();
7881 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7882 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7883 {
7884 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7885 IEM_MC_BEGIN(0, 0);
7886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7887 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7888 IEM_MC_ADVANCE_RIP_AND_FINISH();
7889 } IEM_MC_ELSE() {
7890 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7891 } IEM_MC_ENDIF();
7892 IEM_MC_END();
7893 }
7894 else
7895 {
7896 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7897 IEM_MC_BEGIN(0, 0);
7898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7900 IEM_MC_ADVANCE_RIP_AND_FINISH();
7901 } IEM_MC_ELSE() {
7902 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7903 } IEM_MC_ENDIF();
7904 IEM_MC_END();
7905 }
7906}
7907
7908
7909/** Opcode 0x0f 0x86. */
7910FNIEMOP_DEF(iemOp_jbe_Jv)
7911{
7912 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7913 IEMOP_HLP_MIN_386();
7914 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7915 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7916 {
7917 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7918 IEM_MC_BEGIN(0, 0);
7919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7920 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7921 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7922 } IEM_MC_ELSE() {
7923 IEM_MC_ADVANCE_RIP_AND_FINISH();
7924 } IEM_MC_ENDIF();
7925 IEM_MC_END();
7926 }
7927 else
7928 {
7929 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7930 IEM_MC_BEGIN(0, 0);
7931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7932 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7933 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7934 } IEM_MC_ELSE() {
7935 IEM_MC_ADVANCE_RIP_AND_FINISH();
7936 } IEM_MC_ENDIF();
7937 IEM_MC_END();
7938 }
7939}
7940
7941
7942/** Opcode 0x0f 0x87. */
7943FNIEMOP_DEF(iemOp_jnbe_Jv)
7944{
7945 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7946 IEMOP_HLP_MIN_386();
7947 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7948 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7949 {
7950 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7951 IEM_MC_BEGIN(0, 0);
7952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7953 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7954 IEM_MC_ADVANCE_RIP_AND_FINISH();
7955 } IEM_MC_ELSE() {
7956 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7957 } IEM_MC_ENDIF();
7958 IEM_MC_END();
7959 }
7960 else
7961 {
7962 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7963 IEM_MC_BEGIN(0, 0);
7964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7965 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7966 IEM_MC_ADVANCE_RIP_AND_FINISH();
7967 } IEM_MC_ELSE() {
7968 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7969 } IEM_MC_ENDIF();
7970 IEM_MC_END();
7971 }
7972}
7973
7974
7975/** Opcode 0x0f 0x88. */
7976FNIEMOP_DEF(iemOp_js_Jv)
7977{
7978 IEMOP_MNEMONIC(js_Jv, "js Jv");
7979 IEMOP_HLP_MIN_386();
7980 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7981 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7982 {
7983 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7984 IEM_MC_BEGIN(0, 0);
7985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7986 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7987 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7988 } IEM_MC_ELSE() {
7989 IEM_MC_ADVANCE_RIP_AND_FINISH();
7990 } IEM_MC_ENDIF();
7991 IEM_MC_END();
7992 }
7993 else
7994 {
7995 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7996 IEM_MC_BEGIN(0, 0);
7997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7998 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7999 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8000 } IEM_MC_ELSE() {
8001 IEM_MC_ADVANCE_RIP_AND_FINISH();
8002 } IEM_MC_ENDIF();
8003 IEM_MC_END();
8004 }
8005}
8006
8007
8008/** Opcode 0x0f 0x89. */
8009FNIEMOP_DEF(iemOp_jns_Jv)
8010{
8011 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8012 IEMOP_HLP_MIN_386();
8013 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8014 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8015 {
8016 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8017 IEM_MC_BEGIN(0, 0);
8018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8019 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8020 IEM_MC_ADVANCE_RIP_AND_FINISH();
8021 } IEM_MC_ELSE() {
8022 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8023 } IEM_MC_ENDIF();
8024 IEM_MC_END();
8025 }
8026 else
8027 {
8028 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8029 IEM_MC_BEGIN(0, 0);
8030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8031 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8032 IEM_MC_ADVANCE_RIP_AND_FINISH();
8033 } IEM_MC_ELSE() {
8034 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8035 } IEM_MC_ENDIF();
8036 IEM_MC_END();
8037 }
8038}
8039
8040
8041/** Opcode 0x0f 0x8a. */
8042FNIEMOP_DEF(iemOp_jp_Jv)
8043{
8044 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8045 IEMOP_HLP_MIN_386();
8046 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8047 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8048 {
8049 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8050 IEM_MC_BEGIN(0, 0);
8051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8052 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8053 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8054 } IEM_MC_ELSE() {
8055 IEM_MC_ADVANCE_RIP_AND_FINISH();
8056 } IEM_MC_ENDIF();
8057 IEM_MC_END();
8058 }
8059 else
8060 {
8061 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8062 IEM_MC_BEGIN(0, 0);
8063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8064 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8065 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8066 } IEM_MC_ELSE() {
8067 IEM_MC_ADVANCE_RIP_AND_FINISH();
8068 } IEM_MC_ENDIF();
8069 IEM_MC_END();
8070 }
8071}
8072
8073
8074/** Opcode 0x0f 0x8b. */
8075FNIEMOP_DEF(iemOp_jnp_Jv)
8076{
8077 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8078 IEMOP_HLP_MIN_386();
8079 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8080 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8081 {
8082 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8083 IEM_MC_BEGIN(0, 0);
8084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8085 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8086 IEM_MC_ADVANCE_RIP_AND_FINISH();
8087 } IEM_MC_ELSE() {
8088 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8089 } IEM_MC_ENDIF();
8090 IEM_MC_END();
8091 }
8092 else
8093 {
8094 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8095 IEM_MC_BEGIN(0, 0);
8096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8097 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8098 IEM_MC_ADVANCE_RIP_AND_FINISH();
8099 } IEM_MC_ELSE() {
8100 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8101 } IEM_MC_ENDIF();
8102 IEM_MC_END();
8103 }
8104}
8105
8106
8107/** Opcode 0x0f 0x8c. */
8108FNIEMOP_DEF(iemOp_jl_Jv)
8109{
8110 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8111 IEMOP_HLP_MIN_386();
8112 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8113 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8114 {
8115 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8116 IEM_MC_BEGIN(0, 0);
8117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8118 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8119 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8120 } IEM_MC_ELSE() {
8121 IEM_MC_ADVANCE_RIP_AND_FINISH();
8122 } IEM_MC_ENDIF();
8123 IEM_MC_END();
8124 }
8125 else
8126 {
8127 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8128 IEM_MC_BEGIN(0, 0);
8129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8130 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8131 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8132 } IEM_MC_ELSE() {
8133 IEM_MC_ADVANCE_RIP_AND_FINISH();
8134 } IEM_MC_ENDIF();
8135 IEM_MC_END();
8136 }
8137}
8138
8139
8140/** Opcode 0x0f 0x8d. */
8141FNIEMOP_DEF(iemOp_jnl_Jv)
8142{
8143 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8144 IEMOP_HLP_MIN_386();
8145 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8146 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8147 {
8148 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8149 IEM_MC_BEGIN(0, 0);
8150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8151 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8152 IEM_MC_ADVANCE_RIP_AND_FINISH();
8153 } IEM_MC_ELSE() {
8154 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8155 } IEM_MC_ENDIF();
8156 IEM_MC_END();
8157 }
8158 else
8159 {
8160 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8161 IEM_MC_BEGIN(0, 0);
8162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8163 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8164 IEM_MC_ADVANCE_RIP_AND_FINISH();
8165 } IEM_MC_ELSE() {
8166 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8167 } IEM_MC_ENDIF();
8168 IEM_MC_END();
8169 }
8170}
8171
8172
8173/** Opcode 0x0f 0x8e. */
8174FNIEMOP_DEF(iemOp_jle_Jv)
8175{
8176 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8177 IEMOP_HLP_MIN_386();
8178 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8179 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8180 {
8181 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8182 IEM_MC_BEGIN(0, 0);
8183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8184 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8185 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8186 } IEM_MC_ELSE() {
8187 IEM_MC_ADVANCE_RIP_AND_FINISH();
8188 } IEM_MC_ENDIF();
8189 IEM_MC_END();
8190 }
8191 else
8192 {
8193 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8194 IEM_MC_BEGIN(0, 0);
8195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8196 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8197 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8198 } IEM_MC_ELSE() {
8199 IEM_MC_ADVANCE_RIP_AND_FINISH();
8200 } IEM_MC_ENDIF();
8201 IEM_MC_END();
8202 }
8203}
8204
8205
8206/** Opcode 0x0f 0x8f. */
8207FNIEMOP_DEF(iemOp_jnle_Jv)
8208{
8209 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8210 IEMOP_HLP_MIN_386();
8211 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8212 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8213 {
8214 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8215 IEM_MC_BEGIN(0, 0);
8216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8217 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8218 IEM_MC_ADVANCE_RIP_AND_FINISH();
8219 } IEM_MC_ELSE() {
8220 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8221 } IEM_MC_ENDIF();
8222 IEM_MC_END();
8223 }
8224 else
8225 {
8226 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8227 IEM_MC_BEGIN(0, 0);
8228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8229 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8230 IEM_MC_ADVANCE_RIP_AND_FINISH();
8231 } IEM_MC_ELSE() {
8232 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8233 } IEM_MC_ENDIF();
8234 IEM_MC_END();
8235 }
8236}
8237
8238
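/*
 * The SETcc instructions (0x0f 0x90 thru 0x9f) below all follow the same
 * pattern: evaluate the EFLAGS condition and store 1 or 0 in the byte sized
 * register or memory destination, treating the ModRM.reg field as ignored
 * (see the @todo notes).
 */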
8239/** Opcode 0x0f 0x90. */
8240FNIEMOP_DEF(iemOp_seto_Eb)
8241{
8242 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8243 IEMOP_HLP_MIN_386();
8244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8245
8246 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8247 * any way. AMD says it's "unused", whatever that means. We're
8248 * ignoring for now. */
8249 if (IEM_IS_MODRM_REG_MODE(bRm))
8250 {
8251 /* register target */
8252 IEM_MC_BEGIN(0, 0);
8253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8254 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8255 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8256 } IEM_MC_ELSE() {
8257 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8258 } IEM_MC_ENDIF();
8259 IEM_MC_ADVANCE_RIP_AND_FINISH();
8260 IEM_MC_END();
8261 }
8262 else
8263 {
8264 /* memory target */
8265 IEM_MC_BEGIN(0, 1);
8266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8269 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8270 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8271 } IEM_MC_ELSE() {
8272 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8273 } IEM_MC_ENDIF();
8274 IEM_MC_ADVANCE_RIP_AND_FINISH();
8275 IEM_MC_END();
8276 }
8277}
8278
8279
8280/** Opcode 0x0f 0x91. */
8281FNIEMOP_DEF(iemOp_setno_Eb)
8282{
8283 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8284 IEMOP_HLP_MIN_386();
8285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8286
8287 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8288 * any way. AMD says it's "unused", whatever that means. We're
8289 * ignoring for now. */
8290 if (IEM_IS_MODRM_REG_MODE(bRm))
8291 {
8292 /* register target */
8293 IEM_MC_BEGIN(0, 0);
8294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8295 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8296 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8297 } IEM_MC_ELSE() {
8298 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8299 } IEM_MC_ENDIF();
8300 IEM_MC_ADVANCE_RIP_AND_FINISH();
8301 IEM_MC_END();
8302 }
8303 else
8304 {
8305 /* memory target */
8306 IEM_MC_BEGIN(0, 1);
8307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8310 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8311 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8312 } IEM_MC_ELSE() {
8313 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8314 } IEM_MC_ENDIF();
8315 IEM_MC_ADVANCE_RIP_AND_FINISH();
8316 IEM_MC_END();
8317 }
8318}
8319
8320
8321/** Opcode 0x0f 0x92. */
8322FNIEMOP_DEF(iemOp_setc_Eb)
8323{
8324 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8325 IEMOP_HLP_MIN_386();
8326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8327
8328 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8329 * any way. AMD says it's "unused", whatever that means. We're
8330 * ignoring for now. */
8331 if (IEM_IS_MODRM_REG_MODE(bRm))
8332 {
8333 /* register target */
8334 IEM_MC_BEGIN(0, 0);
8335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8336 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8337 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8338 } IEM_MC_ELSE() {
8339 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8340 } IEM_MC_ENDIF();
8341 IEM_MC_ADVANCE_RIP_AND_FINISH();
8342 IEM_MC_END();
8343 }
8344 else
8345 {
8346 /* memory target */
8347 IEM_MC_BEGIN(0, 1);
8348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8351 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8352 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8353 } IEM_MC_ELSE() {
8354 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8355 } IEM_MC_ENDIF();
8356 IEM_MC_ADVANCE_RIP_AND_FINISH();
8357 IEM_MC_END();
8358 }
8359}
8360
8361
8362/** Opcode 0x0f 0x93. */
8363FNIEMOP_DEF(iemOp_setnc_Eb)
8364{
8365 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8366 IEMOP_HLP_MIN_386();
8367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8368
8369 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8370 * any way. AMD says it's "unused", whatever that means. We're
8371 * ignoring for now. */
8372 if (IEM_IS_MODRM_REG_MODE(bRm))
8373 {
8374 /* register target */
8375 IEM_MC_BEGIN(0, 0);
8376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8377 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8378 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8379 } IEM_MC_ELSE() {
8380 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8381 } IEM_MC_ENDIF();
8382 IEM_MC_ADVANCE_RIP_AND_FINISH();
8383 IEM_MC_END();
8384 }
8385 else
8386 {
8387 /* memory target */
8388 IEM_MC_BEGIN(0, 1);
8389 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8392 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8393 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8394 } IEM_MC_ELSE() {
8395 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8396 } IEM_MC_ENDIF();
8397 IEM_MC_ADVANCE_RIP_AND_FINISH();
8398 IEM_MC_END();
8399 }
8400}
8401
8402
8403/** Opcode 0x0f 0x94. */
8404FNIEMOP_DEF(iemOp_sete_Eb)
8405{
8406 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8407 IEMOP_HLP_MIN_386();
8408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8409
8410 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8411 * any way. AMD says it's "unused", whatever that means. We're
8412 * ignoring for now. */
8413 if (IEM_IS_MODRM_REG_MODE(bRm))
8414 {
8415 /* register target */
8416 IEM_MC_BEGIN(0, 0);
8417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8418 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8419 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8420 } IEM_MC_ELSE() {
8421 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8422 } IEM_MC_ENDIF();
8423 IEM_MC_ADVANCE_RIP_AND_FINISH();
8424 IEM_MC_END();
8425 }
8426 else
8427 {
8428 /* memory target */
8429 IEM_MC_BEGIN(0, 1);
8430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8433 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8434 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8435 } IEM_MC_ELSE() {
8436 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8437 } IEM_MC_ENDIF();
8438 IEM_MC_ADVANCE_RIP_AND_FINISH();
8439 IEM_MC_END();
8440 }
8441}
8442
8443
8444/** Opcode 0x0f 0x95. */
8445FNIEMOP_DEF(iemOp_setne_Eb)
8446{
8447 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8448 IEMOP_HLP_MIN_386();
8449 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8450
8451 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8452 * any way. AMD says it's "unused", whatever that means. We're
8453 * ignoring for now. */
8454 if (IEM_IS_MODRM_REG_MODE(bRm))
8455 {
8456 /* register target */
8457 IEM_MC_BEGIN(0, 0);
8458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8459 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8460 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8461 } IEM_MC_ELSE() {
8462 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8463 } IEM_MC_ENDIF();
8464 IEM_MC_ADVANCE_RIP_AND_FINISH();
8465 IEM_MC_END();
8466 }
8467 else
8468 {
8469 /* memory target */
8470 IEM_MC_BEGIN(0, 1);
8471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8474 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8475 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8476 } IEM_MC_ELSE() {
8477 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8478 } IEM_MC_ENDIF();
8479 IEM_MC_ADVANCE_RIP_AND_FINISH();
8480 IEM_MC_END();
8481 }
8482}
8483
8484
8485/** Opcode 0x0f 0x96. */
8486FNIEMOP_DEF(iemOp_setbe_Eb)
8487{
8488 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8489 IEMOP_HLP_MIN_386();
8490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8491
8492 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8493 * any way. AMD says it's "unused", whatever that means. We're
8494 * ignoring for now. */
8495 if (IEM_IS_MODRM_REG_MODE(bRm))
8496 {
8497 /* register target */
8498 IEM_MC_BEGIN(0, 0);
8499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8500 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8501 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8502 } IEM_MC_ELSE() {
8503 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8504 } IEM_MC_ENDIF();
8505 IEM_MC_ADVANCE_RIP_AND_FINISH();
8506 IEM_MC_END();
8507 }
8508 else
8509 {
8510 /* memory target */
8511 IEM_MC_BEGIN(0, 1);
8512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8515 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8516 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8517 } IEM_MC_ELSE() {
8518 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8519 } IEM_MC_ENDIF();
8520 IEM_MC_ADVANCE_RIP_AND_FINISH();
8521 IEM_MC_END();
8522 }
8523}
8524
8525
8526/** Opcode 0x0f 0x97. */
8527FNIEMOP_DEF(iemOp_setnbe_Eb)
8528{
8529 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8530 IEMOP_HLP_MIN_386();
8531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8532
8533 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8534 * any way. AMD says it's "unused", whatever that means. We're
8535 * ignoring for now. */
8536 if (IEM_IS_MODRM_REG_MODE(bRm))
8537 {
8538 /* register target */
8539 IEM_MC_BEGIN(0, 0);
8540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8541 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8542 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8543 } IEM_MC_ELSE() {
8544 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8545 } IEM_MC_ENDIF();
8546 IEM_MC_ADVANCE_RIP_AND_FINISH();
8547 IEM_MC_END();
8548 }
8549 else
8550 {
8551 /* memory target */
8552 IEM_MC_BEGIN(0, 1);
8553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8556 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8557 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8558 } IEM_MC_ELSE() {
8559 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8560 } IEM_MC_ENDIF();
8561 IEM_MC_ADVANCE_RIP_AND_FINISH();
8562 IEM_MC_END();
8563 }
8564}
8565
8566
8567/** Opcode 0x0f 0x98. */
8568FNIEMOP_DEF(iemOp_sets_Eb)
8569{
8570 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8571 IEMOP_HLP_MIN_386();
8572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8573
8574 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8575 * any way. AMD says it's "unused", whatever that means. We're
8576 * ignoring for now. */
8577 if (IEM_IS_MODRM_REG_MODE(bRm))
8578 {
8579 /* register target */
8580 IEM_MC_BEGIN(0, 0);
8581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8583 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8584 } IEM_MC_ELSE() {
8585 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8586 } IEM_MC_ENDIF();
8587 IEM_MC_ADVANCE_RIP_AND_FINISH();
8588 IEM_MC_END();
8589 }
8590 else
8591 {
8592 /* memory target */
8593 IEM_MC_BEGIN(0, 1);
8594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8597 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8598 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8599 } IEM_MC_ELSE() {
8600 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8601 } IEM_MC_ENDIF();
8602 IEM_MC_ADVANCE_RIP_AND_FINISH();
8603 IEM_MC_END();
8604 }
8605}
8606
8607
8608/** Opcode 0x0f 0x99. */
8609FNIEMOP_DEF(iemOp_setns_Eb)
8610{
8611 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8612 IEMOP_HLP_MIN_386();
8613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8614
8615 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8616 * any way. AMD says it's "unused", whatever that means. We're
8617 * ignoring for now. */
8618 if (IEM_IS_MODRM_REG_MODE(bRm))
8619 {
8620 /* register target */
8621 IEM_MC_BEGIN(0, 0);
8622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8623 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8624 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8625 } IEM_MC_ELSE() {
8626 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8627 } IEM_MC_ENDIF();
8628 IEM_MC_ADVANCE_RIP_AND_FINISH();
8629 IEM_MC_END();
8630 }
8631 else
8632 {
8633 /* memory target */
8634 IEM_MC_BEGIN(0, 1);
8635 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8638 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8639 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8640 } IEM_MC_ELSE() {
8641 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8642 } IEM_MC_ENDIF();
8643 IEM_MC_ADVANCE_RIP_AND_FINISH();
8644 IEM_MC_END();
8645 }
8646}
8647
8648
8649/** Opcode 0x0f 0x9a. */
8650FNIEMOP_DEF(iemOp_setp_Eb)
8651{
8652 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8653 IEMOP_HLP_MIN_386();
8654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8655
8656 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8657 * any way. AMD says it's "unused", whatever that means. We're
8658 * ignoring for now. */
8659 if (IEM_IS_MODRM_REG_MODE(bRm))
8660 {
8661 /* register target */
8662 IEM_MC_BEGIN(0, 0);
8663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8664 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8665 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8666 } IEM_MC_ELSE() {
8667 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8668 } IEM_MC_ENDIF();
8669 IEM_MC_ADVANCE_RIP_AND_FINISH();
8670 IEM_MC_END();
8671 }
8672 else
8673 {
8674 /* memory target */
8675 IEM_MC_BEGIN(0, 1);
8676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8679 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8680 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8681 } IEM_MC_ELSE() {
8682 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8683 } IEM_MC_ENDIF();
8684 IEM_MC_ADVANCE_RIP_AND_FINISH();
8685 IEM_MC_END();
8686 }
8687}
8688
8689
8690/** Opcode 0x0f 0x9b. */
8691FNIEMOP_DEF(iemOp_setnp_Eb)
8692{
8693 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8694 IEMOP_HLP_MIN_386();
8695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8696
8697 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8698 * any way. AMD says it's "unused", whatever that means. We're
8699 * ignoring for now. */
8700 if (IEM_IS_MODRM_REG_MODE(bRm))
8701 {
8702 /* register target */
8703 IEM_MC_BEGIN(0, 0);
8704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8705 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8706 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8707 } IEM_MC_ELSE() {
8708 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8709 } IEM_MC_ENDIF();
8710 IEM_MC_ADVANCE_RIP_AND_FINISH();
8711 IEM_MC_END();
8712 }
8713 else
8714 {
8715 /* memory target */
8716 IEM_MC_BEGIN(0, 1);
8717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8721 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8722 } IEM_MC_ELSE() {
8723 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8724 } IEM_MC_ENDIF();
8725 IEM_MC_ADVANCE_RIP_AND_FINISH();
8726 IEM_MC_END();
8727 }
8728}
8729
8730
8731/** Opcode 0x0f 0x9c. */
8732FNIEMOP_DEF(iemOp_setl_Eb)
8733{
8734 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8735 IEMOP_HLP_MIN_386();
8736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8737
8738 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8739 * any way. AMD says it's "unused", whatever that means. We're
8740 * ignoring for now. */
8741 if (IEM_IS_MODRM_REG_MODE(bRm))
8742 {
8743 /* register target */
8744 IEM_MC_BEGIN(0, 0);
8745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8746 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8747 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8748 } IEM_MC_ELSE() {
8749 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8750 } IEM_MC_ENDIF();
8751 IEM_MC_ADVANCE_RIP_AND_FINISH();
8752 IEM_MC_END();
8753 }
8754 else
8755 {
8756 /* memory target */
8757 IEM_MC_BEGIN(0, 1);
8758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8761 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8762 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8763 } IEM_MC_ELSE() {
8764 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8765 } IEM_MC_ENDIF();
8766 IEM_MC_ADVANCE_RIP_AND_FINISH();
8767 IEM_MC_END();
8768 }
8769}
8770
8771
8772/** Opcode 0x0f 0x9d. */
8773FNIEMOP_DEF(iemOp_setnl_Eb)
8774{
8775 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8776 IEMOP_HLP_MIN_386();
8777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8778
8779 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8780 * any way. AMD says it's "unused", whatever that means. We're
8781 * ignoring for now. */
8782 if (IEM_IS_MODRM_REG_MODE(bRm))
8783 {
8784 /* register target */
8785 IEM_MC_BEGIN(0, 0);
8786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8787 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8788 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8789 } IEM_MC_ELSE() {
8790 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8791 } IEM_MC_ENDIF();
8792 IEM_MC_ADVANCE_RIP_AND_FINISH();
8793 IEM_MC_END();
8794 }
8795 else
8796 {
8797 /* memory target */
8798 IEM_MC_BEGIN(0, 1);
8799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8802 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8803 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8804 } IEM_MC_ELSE() {
8805 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8806 } IEM_MC_ENDIF();
8807 IEM_MC_ADVANCE_RIP_AND_FINISH();
8808 IEM_MC_END();
8809 }
8810}
8811
8812
8813/** Opcode 0x0f 0x9e. */
8814FNIEMOP_DEF(iemOp_setle_Eb)
8815{
8816 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8817 IEMOP_HLP_MIN_386();
8818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8819
8820 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8821 * any way. AMD says it's "unused", whatever that means. We're
8822 * ignoring for now. */
8823 if (IEM_IS_MODRM_REG_MODE(bRm))
8824 {
8825 /* register target */
8826 IEM_MC_BEGIN(0, 0);
8827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8828 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8829 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8830 } IEM_MC_ELSE() {
8831 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8832 } IEM_MC_ENDIF();
8833 IEM_MC_ADVANCE_RIP_AND_FINISH();
8834 IEM_MC_END();
8835 }
8836 else
8837 {
8838 /* memory target */
8839 IEM_MC_BEGIN(0, 1);
8840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8843 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8844 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8845 } IEM_MC_ELSE() {
8846 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8847 } IEM_MC_ENDIF();
8848 IEM_MC_ADVANCE_RIP_AND_FINISH();
8849 IEM_MC_END();
8850 }
8851}
8852
8853
8854/** Opcode 0x0f 0x9f. */
8855FNIEMOP_DEF(iemOp_setnle_Eb)
8856{
8857 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8858 IEMOP_HLP_MIN_386();
8859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8860
8861 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8862 * any way. AMD says it's "unused", whatever that means. We're
8863 * ignoring it for now. */
8864 if (IEM_IS_MODRM_REG_MODE(bRm))
8865 {
8866 /* register target */
8867 IEM_MC_BEGIN(0, 0);
8868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8869 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8870 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8871 } IEM_MC_ELSE() {
8872 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8873 } IEM_MC_ENDIF();
8874 IEM_MC_ADVANCE_RIP_AND_FINISH();
8875 IEM_MC_END();
8876 }
8877 else
8878 {
8879 /* memory target */
8880 IEM_MC_BEGIN(0, 1);
8881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8884 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8885 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8886 } IEM_MC_ELSE() {
8887 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8888 } IEM_MC_ENDIF();
8889 IEM_MC_ADVANCE_RIP_AND_FINISH();
8890 IEM_MC_END();
8891 }
8892}
8893
8894
8895/** Opcode 0x0f 0xa0. */
8896FNIEMOP_DEF(iemOp_push_fs)
8897{
8898 IEMOP_MNEMONIC(push_fs, "push fs");
8899 IEMOP_HLP_MIN_386();
8900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8901 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8902}
8903
8904
8905/** Opcode 0x0f 0xa1. */
8906FNIEMOP_DEF(iemOp_pop_fs)
8907{
8908 IEMOP_MNEMONIC(pop_fs, "pop fs");
8909 IEMOP_HLP_MIN_386();
8910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8911 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8912}
8913
8914
8915/** Opcode 0x0f 0xa2. */
8916FNIEMOP_DEF(iemOp_cpuid)
8917{
8918 IEMOP_MNEMONIC(cpuid, "cpuid");
8919 IEMOP_HLP_MIN_486(); /* not all 486 models have CPUID. */
8920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8921 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_cpuid);
8922}
8923
8924
8925/**
8926 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8927 * iemOp_bts_Ev_Gv.
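 *
 * For a memory destination the bit offset in Gv is a signed quantity: the
 * low 4/5/6 bits select the bit within the 16/32/64-bit word, while the
 * remaining bits, shifted arithmetically right and scaled by the operand
 * size, adjust the effective address.  E.g. a 32-bit 'bt [mem], eax' with
 * eax=35 tests bit 3 of the dword at mem+4, and eax=-1 tests bit 31 of
 * the dword at mem-4.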
8928 */
8929#define IEMOP_BODY_BIT_Ev_Gv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
8930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8932 \
8933 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8934 { \
8935 /* register destination. */ \
8936 switch (pVCpu->iem.s.enmEffOpSize) \
8937 { \
8938 case IEMMODE_16BIT: \
8939 IEM_MC_BEGIN(3, 0); \
8940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8941 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8942 IEM_MC_ARG(uint16_t, u16Src, 1); \
8943 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8944 \
8945 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8946 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8947 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8948 IEM_MC_REF_EFLAGS(pEFlags); \
8949 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8950 \
8951 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8952 IEM_MC_END(); \
8953 break; \
8954 \
8955 case IEMMODE_32BIT: \
8956 IEM_MC_BEGIN(3, 0); \
8957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8958 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8959 IEM_MC_ARG(uint32_t, u32Src, 1); \
8960 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8961 \
8962 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8963 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8964 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8965 IEM_MC_REF_EFLAGS(pEFlags); \
8966 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
8967 \
8968 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
8969 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8970 IEM_MC_END(); \
8971 break; \
8972 \
8973 case IEMMODE_64BIT: \
8974 IEM_MC_BEGIN(3, 0); \
8975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8976 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8977 IEM_MC_ARG(uint64_t, u64Src, 1); \
8978 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8979 \
8980 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8981 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8982 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8983 IEM_MC_REF_EFLAGS(pEFlags); \
8984 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
8985 \
8986 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8987 IEM_MC_END(); \
8988 break; \
8989 \
8990 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8991 } \
8992 } \
8993 else \
8994 { \
8995 /* memory destination. */ \
8996 /** @todo test negative bit offsets! */ \
8997 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
8998 { \
8999 switch (pVCpu->iem.s.enmEffOpSize) \
9000 { \
9001 case IEMMODE_16BIT: \
9002 IEM_MC_BEGIN(3, 2); \
9003 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9004 IEM_MC_ARG(uint16_t, u16Src, 1); \
9005 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9007 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9008 \
9009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9010 IEMOP_HLP_DONE_DECODING(); \
9011 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9012 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9013 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9014 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9015 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9016 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9017 IEM_MC_FETCH_EFLAGS(EFlags); \
9018 \
9019 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9020 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9021 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
9022 \
9023 IEM_MC_COMMIT_EFLAGS(EFlags); \
9024 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9025 IEM_MC_END(); \
9026 break; \
9027 \
9028 case IEMMODE_32BIT: \
9029 IEM_MC_BEGIN(3, 2); \
9030 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9031 IEM_MC_ARG(uint32_t, u32Src, 1); \
9032 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9034 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9035 \
9036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9037 IEMOP_HLP_DONE_DECODING(); \
9038 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9039 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9040 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9041 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9042 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9043 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9044 IEM_MC_FETCH_EFLAGS(EFlags); \
9045 \
9046 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9047 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9048 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
9049 \
9050 IEM_MC_COMMIT_EFLAGS(EFlags); \
9051 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9052 IEM_MC_END(); \
9053 break; \
9054 \
9055 case IEMMODE_64BIT: \
9056 IEM_MC_BEGIN(3, 2); \
9057 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9058 IEM_MC_ARG(uint64_t, u64Src, 1); \
9059 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9061 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9062 \
9063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9064 IEMOP_HLP_DONE_DECODING(); \
9065 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9066 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9067 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9068 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9069 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9070 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9071 IEM_MC_FETCH_EFLAGS(EFlags); \
9072 \
9073 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9074 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9075 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
9076 \
9077 IEM_MC_COMMIT_EFLAGS(EFlags); \
9078 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9079 IEM_MC_END(); \
9080 break; \
9081 \
9082 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9083 } \
9084 } \
9085 else \
9086 { \
9087 (void)0
9088
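/* IEMOP_BODY_BIT_Ev_Gv above deliberately leaves the LOCK-prefixed memory
   branch open; exactly one of the two macros below must follow it, either
   rejecting the LOCK prefix (bt) or supplying the interlocked workers
   (bts/btr/btc). */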
9089#define IEMOP_BODY_BIT_Ev_Gv_NO_LOCK() \
9090 IEMOP_HLP_DONE_DECODING(); \
9091 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9092 } \
9093 } \
9094 (void)0
9095
9096#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9097 switch (pVCpu->iem.s.enmEffOpSize) \
9098 { \
9099 case IEMMODE_16BIT: \
9100 IEM_MC_BEGIN(3, 2); \
9101 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9102 IEM_MC_ARG(uint16_t, u16Src, 1); \
9103 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9105 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9106 \
9107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9108 IEMOP_HLP_DONE_DECODING(); \
9109 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9110 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9111 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9112 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9113 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9114 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9115 IEM_MC_FETCH_EFLAGS(EFlags); \
9116 \
9117 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9118 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9119 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
9120 \
9121 IEM_MC_COMMIT_EFLAGS(EFlags); \
9122 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9123 IEM_MC_END(); \
9124 break; \
9125 \
9126 case IEMMODE_32BIT: \
9127 IEM_MC_BEGIN(3, 2); \
9128 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9129 IEM_MC_ARG(uint32_t, u32Src, 1); \
9130 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9132 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9133 \
9134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9135 IEMOP_HLP_DONE_DECODING(); \
9136 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9137 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9138 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9139 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9140 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9141 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9142 IEM_MC_FETCH_EFLAGS(EFlags); \
9143 \
9144 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9145 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9146 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
9147 \
9148 IEM_MC_COMMIT_EFLAGS(EFlags); \
9149 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9150 IEM_MC_END(); \
9151 break; \
9152 \
9153 case IEMMODE_64BIT: \
9154 IEM_MC_BEGIN(3, 2); \
9155 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9156 IEM_MC_ARG(uint64_t, u64Src, 1); \
9157 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9159 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9160 \
9161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9162 IEMOP_HLP_DONE_DECODING(); \
9163 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9164 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9165 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9166 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9167 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9168 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9169 IEM_MC_FETCH_EFLAGS(EFlags); \
9170 \
9171 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9172 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9173 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
9174 \
9175 IEM_MC_COMMIT_EFLAGS(EFlags); \
9176 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9177 IEM_MC_END(); \
9178 break; \
9179 \
9180 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9181 } \
9182 } \
9183 } \
9184 (void)0
9185
9186
9187/** Opcode 0x0f 0xa3. */
9188FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9189{
9190 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9191 IEMOP_HLP_MIN_386();
9192 IEMOP_BODY_BIT_Ev_Gv(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
9193 IEMOP_BODY_BIT_Ev_Gv_NO_LOCK();
9194}
9195
9196
9197/**
9198 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
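 *
 * Note that in the memory forms the shift count immediate follows the
 * ModR/M bytes, so IEM_MC_CALC_RM_EFF_ADDR is told that one opcode byte is
 * still outstanding; this keeps 64-bit RIP-relative addressing, which is
 * relative to the end of the instruction, correct.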
9199 */
9200FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9201{
9202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9203 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9204
9205 if (IEM_IS_MODRM_REG_MODE(bRm))
9206 {
9207 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9208
9209 switch (pVCpu->iem.s.enmEffOpSize)
9210 {
9211 case IEMMODE_16BIT:
9212 IEM_MC_BEGIN(4, 0);
9213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9214 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9215 IEM_MC_ARG(uint16_t, u16Src, 1);
9216 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9217 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9218
9219 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9220 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9221 IEM_MC_REF_EFLAGS(pEFlags);
9222 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9223
9224 IEM_MC_ADVANCE_RIP_AND_FINISH();
9225 IEM_MC_END();
9226 break;
9227
9228 case IEMMODE_32BIT:
9229 IEM_MC_BEGIN(4, 0);
9230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9231 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9232 IEM_MC_ARG(uint32_t, u32Src, 1);
9233 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9234 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9235
9236 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9237 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9238 IEM_MC_REF_EFLAGS(pEFlags);
9239 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9240
9241 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9242 IEM_MC_ADVANCE_RIP_AND_FINISH();
9243 IEM_MC_END();
9244 break;
9245
9246 case IEMMODE_64BIT:
9247 IEM_MC_BEGIN(4, 0);
9248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9249 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9250 IEM_MC_ARG(uint64_t, u64Src, 1);
9251 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9252 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9253
9254 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9255 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9256 IEM_MC_REF_EFLAGS(pEFlags);
9257 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9258
9259 IEM_MC_ADVANCE_RIP_AND_FINISH();
9260 IEM_MC_END();
9261 break;
9262
9263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9264 }
9265 }
9266 else
9267 {
9268 switch (pVCpu->iem.s.enmEffOpSize)
9269 {
9270 case IEMMODE_16BIT:
9271 IEM_MC_BEGIN(4, 2);
9272 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9273 IEM_MC_ARG(uint16_t, u16Src, 1);
9274 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9275 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9277
9278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9279 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9280 IEM_MC_ASSIGN(cShiftArg, cShift);
9281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9282 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9283 IEM_MC_FETCH_EFLAGS(EFlags);
9284 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9286
9287 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9288 IEM_MC_COMMIT_EFLAGS(EFlags);
9289 IEM_MC_ADVANCE_RIP_AND_FINISH();
9290 IEM_MC_END();
9291 break;
9292
9293 case IEMMODE_32BIT:
9294 IEM_MC_BEGIN(4, 2);
9295 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9296 IEM_MC_ARG(uint32_t, u32Src, 1);
9297 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9300
9301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9302 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9303 IEM_MC_ASSIGN(cShiftArg, cShift);
9304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9305 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9306 IEM_MC_FETCH_EFLAGS(EFlags);
9307 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9308 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9309
9310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9311 IEM_MC_COMMIT_EFLAGS(EFlags);
9312 IEM_MC_ADVANCE_RIP_AND_FINISH();
9313 IEM_MC_END();
9314 break;
9315
9316 case IEMMODE_64BIT:
9317 IEM_MC_BEGIN(4, 2);
9318 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9319 IEM_MC_ARG(uint64_t, u64Src, 1);
9320 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9321 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9323
9324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9325 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9326 IEM_MC_ASSIGN(cShiftArg, cShift);
9327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9328 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9329 IEM_MC_FETCH_EFLAGS(EFlags);
9330 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9331 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9332
9333 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9334 IEM_MC_COMMIT_EFLAGS(EFlags);
9335 IEM_MC_ADVANCE_RIP_AND_FINISH();
9336 IEM_MC_END();
9337 break;
9338
9339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9340 }
9341 }
9342}
9343
9344
9345/**
9346 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9347 */
9348FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9349{
9350 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9351 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9352
9353 if (IEM_IS_MODRM_REG_MODE(bRm))
9354 {
9355 switch (pVCpu->iem.s.enmEffOpSize)
9356 {
9357 case IEMMODE_16BIT:
9358 IEM_MC_BEGIN(4, 0);
9359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9360 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9361 IEM_MC_ARG(uint16_t, u16Src, 1);
9362 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9363 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9364
9365 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9366 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9367 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9368 IEM_MC_REF_EFLAGS(pEFlags);
9369 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9370
9371 IEM_MC_ADVANCE_RIP_AND_FINISH();
9372 IEM_MC_END();
9373 break;
9374
9375 case IEMMODE_32BIT:
9376 IEM_MC_BEGIN(4, 0);
9377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9378 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9379 IEM_MC_ARG(uint32_t, u32Src, 1);
9380 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9381 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9382
9383 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9384 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9385 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9386 IEM_MC_REF_EFLAGS(pEFlags);
9387 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9388
9389 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9390 IEM_MC_ADVANCE_RIP_AND_FINISH();
9391 IEM_MC_END();
9392 break;
9393
9394 case IEMMODE_64BIT:
9395 IEM_MC_BEGIN(4, 0);
9396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9397 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9398 IEM_MC_ARG(uint64_t, u64Src, 1);
9399 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9400 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9401
9402 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9403 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9404 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9405 IEM_MC_REF_EFLAGS(pEFlags);
9406 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9407
9408 IEM_MC_ADVANCE_RIP_AND_FINISH();
9409 IEM_MC_END();
9410 break;
9411
9412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9413 }
9414 }
9415 else
9416 {
9417 switch (pVCpu->iem.s.enmEffOpSize)
9418 {
9419 case IEMMODE_16BIT:
9420 IEM_MC_BEGIN(4, 2);
9421 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9422 IEM_MC_ARG(uint16_t, u16Src, 1);
9423 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9424 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9426
9427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9429 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9430 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9431 IEM_MC_FETCH_EFLAGS(EFlags);
9432 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9433 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9434
9435 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9436 IEM_MC_COMMIT_EFLAGS(EFlags);
9437 IEM_MC_ADVANCE_RIP_AND_FINISH();
9438 IEM_MC_END();
9439 break;
9440
9441 case IEMMODE_32BIT:
9442 IEM_MC_BEGIN(4, 2);
9443 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9444 IEM_MC_ARG(uint32_t, u32Src, 1);
9445 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9446 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9448
9449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9451 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9452 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9453 IEM_MC_FETCH_EFLAGS(EFlags);
9454 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9455 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9456
9457 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9458 IEM_MC_COMMIT_EFLAGS(EFlags);
9459 IEM_MC_ADVANCE_RIP_AND_FINISH();
9460 IEM_MC_END();
9461 break;
9462
9463 case IEMMODE_64BIT:
9464 IEM_MC_BEGIN(4, 2);
9465 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9466 IEM_MC_ARG(uint64_t, u64Src, 1);
9467 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9468 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9470
9471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9473 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9474 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9475 IEM_MC_FETCH_EFLAGS(EFlags);
9476 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9477 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9478
9479 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9480 IEM_MC_COMMIT_EFLAGS(EFlags);
9481 IEM_MC_ADVANCE_RIP_AND_FINISH();
9482 IEM_MC_END();
9483 break;
9484
9485 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9486 }
9487 }
9488}
9489
9490
9491
9492/** Opcode 0x0f 0xa4. */
9493FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9494{
9495 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9496 IEMOP_HLP_MIN_386();
9497 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9498}
9499
9500
9501/** Opcode 0x0f 0xa5. */
9502FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9503{
9504 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9505 IEMOP_HLP_MIN_386();
9506 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9507}
9508
9509
9510/** Opcode 0x0f 0xa8. */
9511FNIEMOP_DEF(iemOp_push_gs)
9512{
9513 IEMOP_MNEMONIC(push_gs, "push gs");
9514 IEMOP_HLP_MIN_386();
9515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9516 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9517}
9518
9519
9520/** Opcode 0x0f 0xa9. */
9521FNIEMOP_DEF(iemOp_pop_gs)
9522{
9523 IEMOP_MNEMONIC(pop_gs, "pop gs");
9524 IEMOP_HLP_MIN_386();
9525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9526 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9527}
9528
9529
9530/** Opcode 0x0f 0xaa. */
9531FNIEMOP_DEF(iemOp_rsm)
9532{
9533 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9534 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9536 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
9537 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
9538 iemCImpl_rsm);
9539}
9540
9541
9542
9543/** Opcode 0x0f 0xab. */
9544FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9545{
9546 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9547 IEMOP_HLP_MIN_386();
9548 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
9549 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9550}
9551
9552
9553/** Opcode 0x0f 0xac. */
9554FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9555{
9556 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9557 IEMOP_HLP_MIN_386();
9558 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9559}
9560
9561
9562/** Opcode 0x0f 0xad. */
9563FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9564{
9565 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9566 IEMOP_HLP_MIN_386();
9567 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9568}
9569
9570
9571/** Opcode 0x0f 0xae mem/0. */
9572FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9573{
9574 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9575 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9576 IEMOP_RAISE_INVALID_OPCODE_RET();
9577
9578 IEM_MC_BEGIN(3, 1);
9579 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9580 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9581 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9584 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9585 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9586 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9587 IEM_MC_END();
9588}
9589
9590
9591/** Opcode 0x0f 0xae mem/1. */
9592FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9593{
9594 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9595 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9596 IEMOP_RAISE_INVALID_OPCODE_RET();
9597
9598 IEM_MC_BEGIN(3, 1);
9599 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9600 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9601 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9604 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9605 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9606 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9607 IEM_MC_END();
9608}
9609
9610
9611/**
9612 * @opmaps grp15
9613 * @opcode !11/2
9614 * @oppfx none
9615 * @opcpuid sse
9616 * @opgroup og_sse_mxcsrsm
9617 * @opxcpttype 5
9618 * @optest op1=0 -> mxcsr=0
9619 * @optest op1=0x2083 -> mxcsr=0x2083
9620 * @optest op1=0xfffffffe -> value.xcpt=0xd
9621 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9622 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9623 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9624 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9625 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9626 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9627 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9628 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9629 */
9630FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9631{
9632 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9633 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9634 IEMOP_RAISE_INVALID_OPCODE_RET();
9635
9636 IEM_MC_BEGIN(2, 0);
9637 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9638 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9641 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9642 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9643 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9644 IEM_MC_END();
9645}
9646
9647
9648/**
9649 * @opmaps grp15
9650 * @opcode !11/3
9651 * @oppfx none
9652 * @opcpuid sse
9653 * @opgroup og_sse_mxcsrsm
9654 * @opxcpttype 5
9655 * @optest mxcsr=0 -> op1=0
9656 * @optest mxcsr=0x2083 -> op1=0x2083
9657 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9658 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9659 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9660 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9661 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9662 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9663 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9664 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9665 */
9666FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9667{
9668 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9669 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9670 IEMOP_RAISE_INVALID_OPCODE_RET();
9671
9672 IEM_MC_BEGIN(2, 0);
9673 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9674 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9678 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9679 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9680 IEM_MC_END();
9681}
9682
9683
9684/**
9685 * @opmaps grp15
9686 * @opcode !11/4
9687 * @oppfx none
9688 * @opcpuid xsave
9689 * @opgroup og_system
9690 * @opxcpttype none
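 * @remarks The effective operand size is passed along so the C worker can
 *          tell the XSAVE64 (REX.W) form apart from plain XSAVE.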
9691 */
9692FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9693{
9694 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9695 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9696 IEMOP_RAISE_INVALID_OPCODE_RET();
9697
9698 IEM_MC_BEGIN(3, 0);
9699 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9700 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9701 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9704 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9705 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9706 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9707 IEM_MC_END();
9708}
9709
9710
9711/**
9712 * @opmaps grp15
9713 * @opcode !11/5
9714 * @oppfx none
9715 * @opcpuid xsave
9716 * @opgroup og_system
9717 * @opxcpttype none
9718 */
9719FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9720{
9721 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9722 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9723 IEMOP_RAISE_INVALID_OPCODE_RET();
9724
9725 IEM_MC_BEGIN(3, 0);
9726 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9727 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9728 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9731 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9732 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9733 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9734 IEM_MC_END();
9735}
9736
9737/** Opcode 0x0f 0xae mem/6. */
9738FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9739
9740/**
9741 * @opmaps grp15
9742 * @opcode !11/7
9743 * @oppfx none
9744 * @opcpuid clfsh
9745 * @opgroup og_cachectl
9746 * @optest op1=1 ->
9747 */
9748FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9749{
9750 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
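    /* Without CLFLUSH support the encoding must still be decoded as a ModR/M
       instruction, hence the forward to the invalid-with-RM handler rather
       than raising #UD directly. */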
9751 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9752 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9753
9754 IEM_MC_BEGIN(2, 0);
9755 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9756 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9759 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9760 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9761 IEM_MC_END();
9762}
9763
9764/**
9765 * @opmaps grp15
9766 * @opcode !11/7
9767 * @oppfx 0x66
9768 * @opcpuid clflushopt
9769 * @opgroup og_cachectl
9770 * @optest op1=1 ->
9771 */
9772FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9773{
9774 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9775 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9776 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9777
9778 IEM_MC_BEGIN(2, 0);
9779 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9780 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9783 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9784 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9785 IEM_MC_END();
9786}
9787
9788
9789/** Opcode 0x0f 0xae 11b/5. */
9790FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9791{
9792 RT_NOREF_PV(bRm);
9793 IEMOP_MNEMONIC(lfence, "lfence");
9794 IEM_MC_BEGIN(0, 0);
9795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
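    /* On x86/AMD64 hosts the real LFENCE can only be used when the host
       itself has SSE2; otherwise an alternative memory fence is used. */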
9796#ifdef RT_ARCH_ARM64
9797 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9798#else
9799 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9800 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9801 else
9802 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9803#endif
9804 IEM_MC_ADVANCE_RIP_AND_FINISH();
9805 IEM_MC_END();
9806}
9807
9808
9809/** Opcode 0x0f 0xae 11b/6. */
9810FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9811{
9812 RT_NOREF_PV(bRm);
9813 IEMOP_MNEMONIC(mfence, "mfence");
9814 IEM_MC_BEGIN(0, 0);
9815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9816#ifdef RT_ARCH_ARM64
9817 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9818#else
9819 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9820 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9821 else
9822 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9823#endif
9824 IEM_MC_ADVANCE_RIP_AND_FINISH();
9825 IEM_MC_END();
9826}
9827
9828
9829/** Opcode 0x0f 0xae 11b/7. */
9830FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9831{
9832 RT_NOREF_PV(bRm);
9833 IEMOP_MNEMONIC(sfence, "sfence");
9834 IEM_MC_BEGIN(0, 0);
9835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9836#ifdef RT_ARCH_ARM64
9837 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9838#else
9839 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9840 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9841 else
9842 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9843#endif
9844 IEM_MC_ADVANCE_RIP_AND_FINISH();
9845 IEM_MC_END();
9846}
9847
9848
9849/** Opcode 0xf3 0x0f 0xae 11b/0. */
9850FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9851{
9852 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9853 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9854 {
9855 IEM_MC_BEGIN(1, 0);
9856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9857 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9858 IEM_MC_ARG(uint64_t, u64Dst, 0);
9859 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9860 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9861 IEM_MC_ADVANCE_RIP_AND_FINISH();
9862 IEM_MC_END();
9863 }
9864 else
9865 {
9866 IEM_MC_BEGIN(1, 0);
9867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9868 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9869 IEM_MC_ARG(uint32_t, u32Dst, 0);
9870 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9871 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9872 IEM_MC_ADVANCE_RIP_AND_FINISH();
9873 IEM_MC_END();
9874 }
9875}
9876
9877
9878/** Opcode 0xf3 0x0f 0xae 11b/1. */
9879FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9880{
9881 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9882 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9883 {
9884 IEM_MC_BEGIN(1, 0);
9885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9886 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9887 IEM_MC_ARG(uint64_t, u64Dst, 0);
9888 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9889 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9890 IEM_MC_ADVANCE_RIP_AND_FINISH();
9891 IEM_MC_END();
9892 }
9893 else
9894 {
9895 IEM_MC_BEGIN(1, 0);
9896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9897 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9898 IEM_MC_ARG(uint32_t, u32Dst, 0);
9899 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9900 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9901 IEM_MC_ADVANCE_RIP_AND_FINISH();
9902 IEM_MC_END();
9903 }
9904}
9905
9906
9907/** Opcode 0xf3 0x0f 0xae 11b/2. */
9908FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9909{
9910 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
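    /* Without REX.W only the low 32 bits are fetched and the base is
       zero-extended; the REX.W form additionally demands a canonical
       address, raising #GP(0) otherwise. */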
9911 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9912 {
9913 IEM_MC_BEGIN(1, 0);
9914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9915 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9916 IEM_MC_ARG(uint64_t, u64Dst, 0);
9917 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9918 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9919 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9920 IEM_MC_ADVANCE_RIP_AND_FINISH();
9921 IEM_MC_END();
9922 }
9923 else
9924 {
9925 IEM_MC_BEGIN(1, 0);
9926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9927 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9928 IEM_MC_ARG(uint32_t, u32Dst, 0);
9929 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9930 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9931 IEM_MC_ADVANCE_RIP_AND_FINISH();
9932 IEM_MC_END();
9933 }
9934}
9935
9936
9937/** Opcode 0xf3 0x0f 0xae 11b/3. */
9938FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9939{
9940 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9941 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9942 {
9943 IEM_MC_BEGIN(1, 0);
9944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9945 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9946 IEM_MC_ARG(uint64_t, u64Dst, 0);
9947 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9948 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9949 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9950 IEM_MC_ADVANCE_RIP_AND_FINISH();
9951 IEM_MC_END();
9952 }
9953 else
9954 {
9955 IEM_MC_BEGIN(1, 0);
9956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9957 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9958 IEM_MC_ARG(uint32_t, u32Dst, 0);
9959 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9960 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
9961 IEM_MC_ADVANCE_RIP_AND_FINISH();
9962 IEM_MC_END();
9963 }
9964}
9965
9966
9967/**
9968 * Group 15 jump table for register variant.
9969 */
9970IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
9971{ /* pfx: none, 066h, 0f3h, 0f2h */
9972 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
9973 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
9974 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
9975 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
9976 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9977 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9978 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9979 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9980};
9981AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
9982
9983
9984/**
9985 * Group 15 jump table for memory variant.
9986 */
9987IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
9988{ /* pfx: none, 066h, 0f3h, 0f2h */
9989 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9990 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9991 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9992 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9993 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9994 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9995 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9996 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9997};
9998AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
9999
10000
10001/** Opcode 0x0f 0xae. */
10002FNIEMOP_DEF(iemOp_Grp15)
10003{
10004 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
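    /* Group 15 dispatches on both the ModR/M mod field (register and memory
       forms decode completely differently) and on the mandatory prefix:
       table index = reg * 4 + prefix index (none/66h/F3h/F2h). */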
10006 if (IEM_IS_MODRM_REG_MODE(bRm))
10007 /* register, register */
10008 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10009 + pVCpu->iem.s.idxPrefix], bRm);
10010 /* memory, register */
10011 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10012 + pVCpu->iem.s.idxPrefix], bRm);
10013}
10014
10015
10016/** Opcode 0x0f 0xaf. */
10017FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10018{
10019 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10020 IEMOP_HLP_MIN_386();
10021 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10022 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10023 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
10024}
10025
10026
10027/** Opcode 0x0f 0xb0. */
10028FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10029{
10030 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10031 IEMOP_HLP_MIN_486();
10032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10033
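    /* CMPXCHG r/m8, r8: AL is compared with the destination; on match ZF is
       set and the source register is stored in the destination, otherwise ZF
       is cleared and the destination value is loaded into AL.  A LOCK prefix
       selects the interlocked worker below. */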
10034 if (IEM_IS_MODRM_REG_MODE(bRm))
10035 {
10036 IEM_MC_BEGIN(4, 0);
10037 IEMOP_HLP_DONE_DECODING();
10038 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10039 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10040 IEM_MC_ARG(uint8_t, u8Src, 2);
10041 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10042
10043 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10044 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10045 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10046 IEM_MC_REF_EFLAGS(pEFlags);
10047 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10048 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10049 else
10050 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10051
10052 IEM_MC_ADVANCE_RIP_AND_FINISH();
10053 IEM_MC_END();
10054 }
10055 else
10056 {
10057 IEM_MC_BEGIN(4, 3);
10058 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10059 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10060 IEM_MC_ARG(uint8_t, u8Src, 2);
10061 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10063 IEM_MC_LOCAL(uint8_t, u8Al);
10064
10065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10066 IEMOP_HLP_DONE_DECODING();
10067 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10068 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10069 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10070 IEM_MC_FETCH_EFLAGS(EFlags);
10071 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10072 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10073 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10074 else
10075 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10076
10077 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10078 IEM_MC_COMMIT_EFLAGS(EFlags);
10079 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10080 IEM_MC_ADVANCE_RIP_AND_FINISH();
10081 IEM_MC_END();
10082 }
10083}
10084
10085/** Opcode 0x0f 0xb1. */
10086FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10087{
10088 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10089 IEMOP_HLP_MIN_486();
10090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10091
10092 if (IEM_IS_MODRM_REG_MODE(bRm))
10093 {
10094 switch (pVCpu->iem.s.enmEffOpSize)
10095 {
10096 case IEMMODE_16BIT:
10097 IEM_MC_BEGIN(4, 0);
10098 IEMOP_HLP_DONE_DECODING();
10099 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10100 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10101 IEM_MC_ARG(uint16_t, u16Src, 2);
10102 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10103
10104 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10105 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10106 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10107 IEM_MC_REF_EFLAGS(pEFlags);
10108 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10109 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10110 else
10111 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10112
10113 IEM_MC_ADVANCE_RIP_AND_FINISH();
10114 IEM_MC_END();
10115 break;
10116
10117 case IEMMODE_32BIT:
10118 IEM_MC_BEGIN(4, 0);
10119 IEMOP_HLP_DONE_DECODING();
10120 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10121 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10122 IEM_MC_ARG(uint32_t, u32Src, 2);
10123 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10124
10125 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10126 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10127 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10128 IEM_MC_REF_EFLAGS(pEFlags);
10129 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10130 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10131 else
10132 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10133
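                /* A 32-bit write must clear the high half of the full 64-bit
                   register, but only for the register that was actually
                   written: the destination on success, EAX on failure. */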
10134 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10135 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10136 } IEM_MC_ELSE() {
10137 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10138 } IEM_MC_ENDIF();
10139
10140 IEM_MC_ADVANCE_RIP_AND_FINISH();
10141 IEM_MC_END();
10142 break;
10143
10144 case IEMMODE_64BIT:
10145 IEM_MC_BEGIN(4, 0);
10146 IEMOP_HLP_DONE_DECODING();
10147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10148 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10149#ifdef RT_ARCH_X86
10150 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10151#else
10152 IEM_MC_ARG(uint64_t, u64Src, 2);
10153#endif
10154 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10155
10156 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10157 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10158 IEM_MC_REF_EFLAGS(pEFlags);
10159#ifdef RT_ARCH_X86
10160 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10161 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10162 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10163 else
10164 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10165#else
10166 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10168 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10169 else
10170 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10171#endif
10172
10173 IEM_MC_ADVANCE_RIP_AND_FINISH();
10174 IEM_MC_END();
10175 break;
10176
10177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10178 }
10179 }
10180 else
10181 {
10182 switch (pVCpu->iem.s.enmEffOpSize)
10183 {
10184 case IEMMODE_16BIT:
10185 IEM_MC_BEGIN(4, 3);
10186 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10187 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10188 IEM_MC_ARG(uint16_t, u16Src, 2);
10189 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10191 IEM_MC_LOCAL(uint16_t, u16Ax);
10192
10193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10194 IEMOP_HLP_DONE_DECODING();
10195 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10196 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10197 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10198 IEM_MC_FETCH_EFLAGS(EFlags);
10199 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10200 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10201 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10202 else
10203 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10204
10205 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10206 IEM_MC_COMMIT_EFLAGS(EFlags);
10207 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10208 IEM_MC_ADVANCE_RIP_AND_FINISH();
10209 IEM_MC_END();
10210 break;
10211
10212 case IEMMODE_32BIT:
10213 IEM_MC_BEGIN(4, 3);
10214 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10215 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10216 IEM_MC_ARG(uint32_t, u32Src, 2);
10217 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10219 IEM_MC_LOCAL(uint32_t, u32Eax);
10220
10221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10222 IEMOP_HLP_DONE_DECODING();
10223 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10224 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10225 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10226 IEM_MC_FETCH_EFLAGS(EFlags);
10227 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10228 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10229 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10230 else
10231 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10232
10233 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10234 IEM_MC_COMMIT_EFLAGS(EFlags);
10235
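                /* EAX is written back only on failure; on success the
                   accumulator is architecturally untouched, so an
                   unconditional 32-bit store here would clear the high half
                   of RAX in 64-bit mode. */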
10236 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10237 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10238 } IEM_MC_ENDIF();
10239
10240 IEM_MC_ADVANCE_RIP_AND_FINISH();
10241 IEM_MC_END();
10242 break;
10243
10244 case IEMMODE_64BIT:
10245 IEM_MC_BEGIN(4, 3);
10246 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10247 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10248#ifdef RT_ARCH_X86
10249 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10250#else
10251 IEM_MC_ARG(uint64_t, u64Src, 2);
10252#endif
10253 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10255 IEM_MC_LOCAL(uint64_t, u64Rax);
10256
10257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10258 IEMOP_HLP_DONE_DECODING();
10259 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10260 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10261 IEM_MC_FETCH_EFLAGS(EFlags);
10262 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10263#ifdef RT_ARCH_X86
10264 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10265 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10266 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10267 else
10268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10269#else
10270 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10271 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10272 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10273 else
10274 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10275#endif
10276
10277 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10278 IEM_MC_COMMIT_EFLAGS(EFlags);
10279 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10280 IEM_MC_ADVANCE_RIP_AND_FINISH();
10281 IEM_MC_END();
10282 break;
10283
10284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10285 }
10286 }
10287}
10288
10289
10290/** Opcode 0x0f 0xb2. */
10291FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10292{
10293 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10294 IEMOP_HLP_MIN_386();
10295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
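    /* Mp operands exist only in memory; the register form is undefined. */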
10296 if (IEM_IS_MODRM_REG_MODE(bRm))
10297 IEMOP_RAISE_INVALID_OPCODE_RET();
10298 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10299}
10300
10301
10302/** Opcode 0x0f 0xb3. */
10303FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10304{
10305 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10306 IEMOP_HLP_MIN_386();
10307 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
10308 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10309}
10310
10311
10312/** Opcode 0x0f 0xb4. */
10313FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10314{
10315 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10316 IEMOP_HLP_MIN_386();
10317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10318 if (IEM_IS_MODRM_REG_MODE(bRm))
10319 IEMOP_RAISE_INVALID_OPCODE_RET();
10320 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10321}
10322
10323
10324/** Opcode 0x0f 0xb5. */
10325FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10326{
10327 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10328 IEMOP_HLP_MIN_386();
10329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10330 if (IEM_IS_MODRM_REG_MODE(bRm))
10331 IEMOP_RAISE_INVALID_OPCODE_RET();
10332 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10333}
10334
10335
10336/** Opcode 0x0f 0xb6. */
10337FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10338{
10339 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10340 IEMOP_HLP_MIN_386();
10341
10342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10343
10344 /*
10345 * If rm is denoting a register, no more instruction bytes.
10346 */
10347 if (IEM_IS_MODRM_REG_MODE(bRm))
10348 {
10349 switch (pVCpu->iem.s.enmEffOpSize)
10350 {
10351 case IEMMODE_16BIT:
10352 IEM_MC_BEGIN(0, 1);
10353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10354 IEM_MC_LOCAL(uint16_t, u16Value);
10355 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10356 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10357 IEM_MC_ADVANCE_RIP_AND_FINISH();
10358 IEM_MC_END();
10359 break;
10360
10361 case IEMMODE_32BIT:
10362 IEM_MC_BEGIN(0, 1);
10363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10364 IEM_MC_LOCAL(uint32_t, u32Value);
10365 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10366 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10367 IEM_MC_ADVANCE_RIP_AND_FINISH();
10368 IEM_MC_END();
10369 break;
10370
10371 case IEMMODE_64BIT:
10372 IEM_MC_BEGIN(0, 1);
10373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10374 IEM_MC_LOCAL(uint64_t, u64Value);
10375 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10376 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10377 IEM_MC_ADVANCE_RIP_AND_FINISH();
10378 IEM_MC_END();
10379 break;
10380
10381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10382 }
10383 }
10384 else
10385 {
10386 /*
10387 * We're loading a register from memory.
10388 */
10389 switch (pVCpu->iem.s.enmEffOpSize)
10390 {
10391 case IEMMODE_16BIT:
10392 IEM_MC_BEGIN(0, 2);
10393 IEM_MC_LOCAL(uint16_t, u16Value);
10394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10397 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10398 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10399 IEM_MC_ADVANCE_RIP_AND_FINISH();
10400 IEM_MC_END();
10401 break;
10402
10403 case IEMMODE_32BIT:
10404 IEM_MC_BEGIN(0, 2);
10405 IEM_MC_LOCAL(uint32_t, u32Value);
10406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10409 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10410 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10411 IEM_MC_ADVANCE_RIP_AND_FINISH();
10412 IEM_MC_END();
10413 break;
10414
10415 case IEMMODE_64BIT:
10416 IEM_MC_BEGIN(0, 2);
10417 IEM_MC_LOCAL(uint64_t, u64Value);
10418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10419 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10421 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10422 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10423 IEM_MC_ADVANCE_RIP_AND_FINISH();
10424 IEM_MC_END();
10425 break;
10426
10427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10428 }
10429 }
10430}
10431
10432
10433/** Opcode 0x0f 0xb7. */
10434FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10435{
10436 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10437 IEMOP_HLP_MIN_386();
10438
10439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10440
10441 /** @todo Not entirely sure how the operand size prefix is handled here,
10442 * assuming that it will be ignored. Would be nice to have a few
10443 * tests for this. */
10444 /*
10445 * If rm is denoting a register, no more instruction bytes.
10446 */
10447 if (IEM_IS_MODRM_REG_MODE(bRm))
10448 {
10449 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10450 {
10451 IEM_MC_BEGIN(0, 1);
10452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10453 IEM_MC_LOCAL(uint32_t, u32Value);
10454 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10455 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10456 IEM_MC_ADVANCE_RIP_AND_FINISH();
10457 IEM_MC_END();
10458 }
10459 else
10460 {
10461 IEM_MC_BEGIN(0, 1);
10462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10463 IEM_MC_LOCAL(uint64_t, u64Value);
10464 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10465 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10466 IEM_MC_ADVANCE_RIP_AND_FINISH();
10467 IEM_MC_END();
10468 }
10469 }
10470 else
10471 {
10472 /*
10473 * We're loading a register from memory.
10474 */
10475 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10476 {
10477 IEM_MC_BEGIN(0, 2);
10478 IEM_MC_LOCAL(uint32_t, u32Value);
10479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10482 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10483 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10484 IEM_MC_ADVANCE_RIP_AND_FINISH();
10485 IEM_MC_END();
10486 }
10487 else
10488 {
10489 IEM_MC_BEGIN(0, 2);
10490 IEM_MC_LOCAL(uint64_t, u64Value);
10491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10494 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10495 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10496 IEM_MC_ADVANCE_RIP_AND_FINISH();
10497 IEM_MC_END();
10498 }
10499 }
10500}
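/* In C terms the above boils down to a zero extending copy:
 *      uDst = (uint64_t)(uint16_t)uSrc;    // movzx rax, cx
 * Note that the 32-bit forms implicitly clear bits 63:32 as well, since
 * that is the normal behaviour of 32-bit GPR writes in 64-bit mode. */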
10501
10502
10503/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10504FNIEMOP_UD_STUB(iemOp_jmpe);
10505
10506
10507/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10508FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10509{
10510 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10511 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10512 return iemOp_InvalidNeedRM(pVCpu);
10513#ifndef TST_IEM_CHECK_MC
10514# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10515 static const IEMOPBINSIZES s_Native =
10516 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10517# endif
10518 static const IEMOPBINSIZES s_Fallback =
10519 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10520#endif
10521 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10522 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
10523}
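/* popcnt has simple, fully defined flag behaviour: OF, SF, AF, CF and PF are
 * cleared, and ZF is set if and only if the source operand is zero, e.g.:
 *      popcnt eax, ebx     ; ebx=0xf0 -> eax=4, ZF=0
 */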
10524
10525
10526/**
10527 * @opcode 0xb9
10528 * @opinvalid intel-modrm
10529 * @optest ->
10530 */
10531FNIEMOP_DEF(iemOp_Grp10)
10532{
10533 /*
10534 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
10535 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10536 */
10537 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10538 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10539 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10540}
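/* Consequence of that decoding difference: on Intel the ud1 above is a single
 * instruction covering the modr/m byte and any SIB/displacement it implies
 * (e.g. 0f b9 04 25 44 33 22 11 is one 8 byte #UD instruction), while AMD
 * raises #UD after just the two opcode bytes. */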
10541
10542
10543/**
10544 * Body for group 8 bit instruction.
10545 */
10546#define IEMOP_BODY_BIT_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
10547 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10548 \
10549 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10550 { \
10551 /* register destination. */ \
10552 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10553 \
10554 switch (pVCpu->iem.s.enmEffOpSize) \
10555 { \
10556 case IEMMODE_16BIT: \
10557 IEM_MC_BEGIN(3, 0); \
10558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10559 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10560 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10561 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10562 \
10563 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10564 IEM_MC_REF_EFLAGS(pEFlags); \
10565 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10566 \
10567 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10568 IEM_MC_END(); \
10569 break; \
10570 \
10571 case IEMMODE_32BIT: \
10572 IEM_MC_BEGIN(3, 0); \
10573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10574 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10575 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10576 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10577 \
10578 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10579 IEM_MC_REF_EFLAGS(pEFlags); \
10580 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10581 \
10582 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
10583 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10584 IEM_MC_END(); \
10585 break; \
10586 \
10587 case IEMMODE_64BIT: \
10588 IEM_MC_BEGIN(3, 0); \
10589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10590 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10591 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10592 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10593 \
10594 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10595 IEM_MC_REF_EFLAGS(pEFlags); \
10596 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10597 \
10598 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10599 IEM_MC_END(); \
10600 break; \
10601 \
10602 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10603 } \
10604 } \
10605 else \
10606 { \
10607 /* memory destination. */ \
10608 /** @todo test negative bit offsets! */ \
10609 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10610 { \
10611 switch (pVCpu->iem.s.enmEffOpSize) \
10612 { \
10613 case IEMMODE_16BIT: \
10614 IEM_MC_BEGIN(3, 1); \
10615 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10616 IEM_MC_ARG(uint16_t, u16Src, 1); \
10617 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10619 \
10620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10621 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10622 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10623 IEMOP_HLP_DONE_DECODING(); \
10624 IEM_MC_FETCH_EFLAGS(EFlags); \
10625 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10626 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10627 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
10628 \
10629 IEM_MC_COMMIT_EFLAGS(EFlags); \
10630 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10631 IEM_MC_END(); \
10632 break; \
10633 \
10634 case IEMMODE_32BIT: \
10635 IEM_MC_BEGIN(3, 1); \
10636 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10637 IEM_MC_ARG(uint32_t, u32Src, 1); \
10638 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10640 \
10641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10642 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10643 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10644 IEMOP_HLP_DONE_DECODING(); \
10645 IEM_MC_FETCH_EFLAGS(EFlags); \
10646 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10647 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10648 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
10649 \
10650 IEM_MC_COMMIT_EFLAGS(EFlags); \
10651 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10652 IEM_MC_END(); \
10653 break; \
10654 \
10655 case IEMMODE_64BIT: \
10656 IEM_MC_BEGIN(3, 1); \
10657 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10658 IEM_MC_ARG(uint64_t, u64Src, 1); \
10659 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10661 \
10662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10663 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10664 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10665 IEMOP_HLP_DONE_DECODING(); \
10666 IEM_MC_FETCH_EFLAGS(EFlags); \
10667 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10668 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10669 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
10670 \
10671 IEM_MC_COMMIT_EFLAGS(EFlags); \
10672 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10673 IEM_MC_END(); \
10674 break; \
10675 \
10676 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10677 } \
10678 } \
10679 else \
10680 { \
10681 (void)0
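/* Note: IEMOP_BODY_BIT_Ev_Ib above deliberately ends inside an open else
 * branch (hence the trailing (void)0); each user must complete the body with
 * exactly one of the two continuation macros below, e.g.:
 *      IEMOP_BODY_BIT_Ev_Ib(       iemAImpl_bts_u16,        iemAImpl_bts_u32,        iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
 *      IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
 * which supplies the LOCK prefixed path and closes the open braces. */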
10682
10683#define IEMOP_BODY_BIT_Ev_Ib_NO_LOCK() \
10684 IEMOP_HLP_DONE_DECODING(); \
10685 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
10686 } \
10687 } \
10688 (void)0
10689
10690#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10691 switch (pVCpu->iem.s.enmEffOpSize) \
10692 { \
10693 case IEMMODE_16BIT: \
10694 IEM_MC_BEGIN(3, 1); \
10695 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10696 IEM_MC_ARG(uint16_t, u16Src, 1); \
10697 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10699 \
10700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10701 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10702 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10703 IEMOP_HLP_DONE_DECODING(); \
10704 IEM_MC_FETCH_EFLAGS(EFlags); \
10705 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10706 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10707 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
10708 \
10709 IEM_MC_COMMIT_EFLAGS(EFlags); \
10710 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10711 IEM_MC_END(); \
10712 break; \
10713 \
10714 case IEMMODE_32BIT: \
10715 IEM_MC_BEGIN(3, 1); \
10716 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10717 IEM_MC_ARG(uint32_t, u32Src, 1); \
10718 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10720 \
10721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10722 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10723 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10724 IEMOP_HLP_DONE_DECODING(); \
10725 IEM_MC_FETCH_EFLAGS(EFlags); \
10726 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10727 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10728 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
10729 \
10730 IEM_MC_COMMIT_EFLAGS(EFlags); \
10731 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10732 IEM_MC_END(); \
10733 break; \
10734 \
10735 case IEMMODE_64BIT: \
10736 IEM_MC_BEGIN(3, 1); \
10737 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10738 IEM_MC_ARG(uint64_t, u64Src, 1); \
10739 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10741 \
10742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10743 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10744 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10745 IEMOP_HLP_DONE_DECODING(); \
10746 IEM_MC_FETCH_EFLAGS(EFlags); \
10747 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
10748 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10749 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
10750 \
10751 IEM_MC_COMMIT_EFLAGS(EFlags); \
10752 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10753 IEM_MC_END(); \
10754 break; \
10755 \
10756 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10757 } \
10758 } \
10759 } \
10760 (void)0
10761
10762
10763/** Opcode 0x0f 0xba /4. */
10764FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
10765{
10766 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
10767 IEMOP_BODY_BIT_Ev_Ib(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
10768 IEMOP_BODY_BIT_Ev_Ib_NO_LOCK();
10769}
10770
10771
10772/** Opcode 0x0f 0xba /5. */
10773FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
10774{
10775 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
10776 IEMOP_BODY_BIT_Ev_Ib( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
10777 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
10778}
10779
10780
10781/** Opcode 0x0f 0xba /6. */
10782FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
10783{
10784 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
10785 IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
10786 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10787}
10788
10789
10790/** Opcode 0x0f 0xba /7. */
10791FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
10792{
10793 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
10794 IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
10795 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
10796}
10797
10798
10799/** Opcode 0x0f 0xba. */
10800FNIEMOP_DEF(iemOp_Grp8)
10801{
10802 IEMOP_HLP_MIN_386();
10803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10804 switch (IEM_GET_MODRM_REG_8(bRm))
10805 {
10806 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
10807 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
10808 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
10809 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
10810
10811 case 0: case 1: case 2: case 3:
10812 /* Both AMD and Intel want full modr/m decoding and imm8. */
10813 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10814
10815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10816 }
10817}
10818
10819
10820/** Opcode 0x0f 0xbb. */
10821FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10822{
10823 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10824 IEMOP_HLP_MIN_386();
10825 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
10826 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
10827}
10828
10829
10830/**
10831 * Common worker for BSF and BSR instructions.
10832 *
10833 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10834 * the destination register, which means that for 32-bit operations the high
10835 * bits must be left alone.
10836 *
10837 * @param pImpl Pointer to the instruction implementation (assembly).
10838 */
10839FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10840{
10841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10842
10843 /*
10844 * If rm is denoting a register, no more instruction bytes.
10845 */
10846 if (IEM_IS_MODRM_REG_MODE(bRm))
10847 {
10848 switch (pVCpu->iem.s.enmEffOpSize)
10849 {
10850 case IEMMODE_16BIT:
10851 IEM_MC_BEGIN(3, 0);
10852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10853 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10854 IEM_MC_ARG(uint16_t, u16Src, 1);
10855 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10856
10857 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10858 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10859 IEM_MC_REF_EFLAGS(pEFlags);
10860 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10861
10862 IEM_MC_ADVANCE_RIP_AND_FINISH();
10863 IEM_MC_END();
10864 break;
10865
10866 case IEMMODE_32BIT:
10867 IEM_MC_BEGIN(3, 0);
10868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10870 IEM_MC_ARG(uint32_t, u32Src, 1);
10871 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10872
10873 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10874 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10875 IEM_MC_REF_EFLAGS(pEFlags);
10876 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10877 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10878 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10879 } IEM_MC_ENDIF();
10880 IEM_MC_ADVANCE_RIP_AND_FINISH();
10881 IEM_MC_END();
10882 break;
10883
10884 case IEMMODE_64BIT:
10885 IEM_MC_BEGIN(3, 0);
10886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10887 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10888 IEM_MC_ARG(uint64_t, u64Src, 1);
10889 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10890
10891 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10892 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10893 IEM_MC_REF_EFLAGS(pEFlags);
10894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10895
10896 IEM_MC_ADVANCE_RIP_AND_FINISH();
10897 IEM_MC_END();
10898 break;
10899
10900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10901 }
10902 }
10903 else
10904 {
10905 /*
10906 * We're accessing memory.
10907 */
10908 switch (pVCpu->iem.s.enmEffOpSize)
10909 {
10910 case IEMMODE_16BIT:
10911 IEM_MC_BEGIN(3, 1);
10912 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10913 IEM_MC_ARG(uint16_t, u16Src, 1);
10914 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10916
10917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10919 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10920 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10921 IEM_MC_REF_EFLAGS(pEFlags);
10922 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10923
10924 IEM_MC_ADVANCE_RIP_AND_FINISH();
10925 IEM_MC_END();
10926 break;
10927
10928 case IEMMODE_32BIT:
10929 IEM_MC_BEGIN(3, 1);
10930 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10931 IEM_MC_ARG(uint32_t, u32Src, 1);
10932 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10934
10935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10937 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10938 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10939 IEM_MC_REF_EFLAGS(pEFlags);
10940 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10941
10942 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10943 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10944 } IEM_MC_ENDIF();
10945 IEM_MC_ADVANCE_RIP_AND_FINISH();
10946 IEM_MC_END();
10947 break;
10948
10949 case IEMMODE_64BIT:
10950 IEM_MC_BEGIN(3, 1);
10951 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10952 IEM_MC_ARG(uint64_t, u64Src, 1);
10953 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10955
10956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10958 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10959 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10960 IEM_MC_REF_EFLAGS(pEFlags);
10961 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10962
10963 IEM_MC_ADVANCE_RIP_AND_FINISH();
10964 IEM_MC_END();
10965 break;
10966
10967 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10968 }
10969 }
10970}
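/* Sketch of the ZF handling above: with a zero source the destination is
 * left untouched (the architecturally undefined case), so the high dword of
 * a 32-bit destination is only cleared when the helper actually produced a
 * result:
 *      bsf eax, ebx    ; ebx=0      -> ZF=1, rax left unmodified
 *      bsf eax, ebx    ; ebx=0x8000 -> ZF=0, eax=15, rax[63:32]=0
 */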
10971
10972
10973/** Opcode 0x0f 0xbc. */
10974FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
10975{
10976 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
10977 IEMOP_HLP_MIN_386();
10978 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10979 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
10980}
10981
10982
10983/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
10984FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
10985{
10986 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10987 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
10988 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10989
10990#ifndef TST_IEM_CHECK_MC
10991 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
10992 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
10993 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
10994 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
10995 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
10996 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
10997 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
10998 {
10999 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11000 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11001 };
11002#endif
11003 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11004 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11005 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11006 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
11007}
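/* Unlike bsf, tzcnt is fully defined for a zero source: the result is the
 * operand width in bits and CF is set, e.g.:
 *      tzcnt eax, ebx      ; ebx=0 -> eax=32, CF=1, ZF=0
 * On CPUs without BMI1 the f3 prefix is ignored and the bytes decode as
 * plain bsf, which is what the fBmi1 check above forwards to. */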
11008
11009
11010/** Opcode 0x0f 0xbd. */
11011FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11012{
11013 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11014 IEMOP_HLP_MIN_386();
11015 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11016 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11017}
11018
11019
11020/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11021FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11022{
11023 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11024 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11025 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11026
11027#ifndef TST_IEM_CHECK_MC
11028 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11029 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11030 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11031 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11032 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11033 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11034 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11035 {
11036 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11037 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11038 };
11039#endif
11040 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11041 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11042 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11043 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
11044}
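/* lzcnt mirrors tzcnt: a zero source yields the operand width and CF=1,
 * e.g. lzcnt eax, ebx with ebx=1 gives eax=31, and pre-BMI1 CPUs decode
 * these bytes as plain bsr. */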
11045
11046
11047
11048/** Opcode 0x0f 0xbe. */
11049FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11050{
11051 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11052 IEMOP_HLP_MIN_386();
11053
11054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11055
11056 /*
11057 * If rm is denoting a register, no more instruction bytes.
11058 */
11059 if (IEM_IS_MODRM_REG_MODE(bRm))
11060 {
11061 switch (pVCpu->iem.s.enmEffOpSize)
11062 {
11063 case IEMMODE_16BIT:
11064 IEM_MC_BEGIN(0, 1);
11065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11066 IEM_MC_LOCAL(uint16_t, u16Value);
11067 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11068 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11069 IEM_MC_ADVANCE_RIP_AND_FINISH();
11070 IEM_MC_END();
11071 break;
11072
11073 case IEMMODE_32BIT:
11074 IEM_MC_BEGIN(0, 1);
11075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11076 IEM_MC_LOCAL(uint32_t, u32Value);
11077 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11078 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11079 IEM_MC_ADVANCE_RIP_AND_FINISH();
11080 IEM_MC_END();
11081 break;
11082
11083 case IEMMODE_64BIT:
11084 IEM_MC_BEGIN(0, 1);
11085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11086 IEM_MC_LOCAL(uint64_t, u64Value);
11087 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11088 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11089 IEM_MC_ADVANCE_RIP_AND_FINISH();
11090 IEM_MC_END();
11091 break;
11092
11093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11094 }
11095 }
11096 else
11097 {
11098 /*
11099 * We're loading a register from memory.
11100 */
11101 switch (pVCpu->iem.s.enmEffOpSize)
11102 {
11103 case IEMMODE_16BIT:
11104 IEM_MC_BEGIN(0, 2);
11105 IEM_MC_LOCAL(uint16_t, u16Value);
11106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11109 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11110 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11111 IEM_MC_ADVANCE_RIP_AND_FINISH();
11112 IEM_MC_END();
11113 break;
11114
11115 case IEMMODE_32BIT:
11116 IEM_MC_BEGIN(0, 2);
11117 IEM_MC_LOCAL(uint32_t, u32Value);
11118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11121 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11122 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11123 IEM_MC_ADVANCE_RIP_AND_FINISH();
11124 IEM_MC_END();
11125 break;
11126
11127 case IEMMODE_64BIT:
11128 IEM_MC_BEGIN(0, 2);
11129 IEM_MC_LOCAL(uint64_t, u64Value);
11130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11133 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11134 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11135 IEM_MC_ADVANCE_RIP_AND_FINISH();
11136 IEM_MC_END();
11137 break;
11138
11139 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11140 }
11141 }
11142}
11143
11144
11145/** Opcode 0x0f 0xbf. */
11146FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11147{
11148 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11149 IEMOP_HLP_MIN_386();
11150
11151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11152
11153 /** @todo Not entirely sure how the operand size prefix is handled here,
11154 * assuming that it will be ignored. Would be nice to have a few
11155 * tests for this. */
11156 /*
11157 * If rm is denoting a register, no more instruction bytes.
11158 */
11159 if (IEM_IS_MODRM_REG_MODE(bRm))
11160 {
11161 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11162 {
11163 IEM_MC_BEGIN(0, 1);
11164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11165 IEM_MC_LOCAL(uint32_t, u32Value);
11166 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11167 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11168 IEM_MC_ADVANCE_RIP_AND_FINISH();
11169 IEM_MC_END();
11170 }
11171 else
11172 {
11173 IEM_MC_BEGIN(0, 1);
11174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11175 IEM_MC_LOCAL(uint64_t, u64Value);
11176 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11177 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11178 IEM_MC_ADVANCE_RIP_AND_FINISH();
11179 IEM_MC_END();
11180 }
11181 }
11182 else
11183 {
11184 /*
11185 * We're loading a register from memory.
11186 */
11187 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11188 {
11189 IEM_MC_BEGIN(0, 2);
11190 IEM_MC_LOCAL(uint32_t, u32Value);
11191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11194 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11195 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11196 IEM_MC_ADVANCE_RIP_AND_FINISH();
11197 IEM_MC_END();
11198 }
11199 else
11200 {
11201 IEM_MC_BEGIN(0, 2);
11202 IEM_MC_LOCAL(uint64_t, u64Value);
11203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11206 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11207 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11208 IEM_MC_ADVANCE_RIP_AND_FINISH();
11209 IEM_MC_END();
11210 }
11211 }
11212}
11213
11214
11215/** Opcode 0x0f 0xc0. */
11216FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11217{
11218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11219 IEMOP_HLP_MIN_486();
11220 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11221
11222 /*
11223 * If rm is denoting a register, no more instruction bytes.
11224 */
11225 if (IEM_IS_MODRM_REG_MODE(bRm))
11226 {
11227 IEM_MC_BEGIN(3, 0);
11228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11229 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11230 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11231 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11232
11233 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11234 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11235 IEM_MC_REF_EFLAGS(pEFlags);
11236 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11237
11238 IEM_MC_ADVANCE_RIP_AND_FINISH();
11239 IEM_MC_END();
11240 }
11241 else
11242 {
11243 /*
11244 * We're accessing memory.
11245 */
11246 IEM_MC_BEGIN(3, 3);
11247 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11248 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11249 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11250 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11252
11253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11255 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11256 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11257 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11258 IEM_MC_FETCH_EFLAGS(EFlags);
11259 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11260 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11261 else
11262 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11263
11264 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
11265 IEM_MC_COMMIT_EFLAGS(EFlags);
11266 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11267 IEM_MC_ADVANCE_RIP_AND_FINISH();
11268 IEM_MC_END();
11269 }
11270}
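/* xadd in C terms (what iemAImpl_xadd_u8 implements):
 *      uint8_t const uTmp = *pu8Dst;
 *      *pu8Dst = uTmp + *pu8Reg;       // EFLAGS updated like an add
 *      *pu8Reg = uTmp;                 // source register gets the old value
 * For the memory form above the register is updated from the local copy only
 * after the memory operand has been committed. */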
11271
11272
11273/** Opcode 0x0f 0xc1. */
11274FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11275{
11276 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11277 IEMOP_HLP_MIN_486();
11278 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11279
11280 /*
11281 * If rm is denoting a register, no more instruction bytes.
11282 */
11283 if (IEM_IS_MODRM_REG_MODE(bRm))
11284 {
11285 switch (pVCpu->iem.s.enmEffOpSize)
11286 {
11287 case IEMMODE_16BIT:
11288 IEM_MC_BEGIN(3, 0);
11289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11290 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11291 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11292 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11293
11294 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11295 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11296 IEM_MC_REF_EFLAGS(pEFlags);
11297 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11298
11299 IEM_MC_ADVANCE_RIP_AND_FINISH();
11300 IEM_MC_END();
11301 break;
11302
11303 case IEMMODE_32BIT:
11304 IEM_MC_BEGIN(3, 0);
11305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11306 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11307 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11308 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11309
11310 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11311 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11312 IEM_MC_REF_EFLAGS(pEFlags);
11313 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11314
11315 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11316 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11317 IEM_MC_ADVANCE_RIP_AND_FINISH();
11318 IEM_MC_END();
11319 break;
11320
11321 case IEMMODE_64BIT:
11322 IEM_MC_BEGIN(3, 0);
11323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11324 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11325 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11326 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11327
11328 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11329 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11330 IEM_MC_REF_EFLAGS(pEFlags);
11331 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11332
11333 IEM_MC_ADVANCE_RIP_AND_FINISH();
11334 IEM_MC_END();
11335 break;
11336
11337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11338 }
11339 }
11340 else
11341 {
11342 /*
11343 * We're accessing memory.
11344 */
11345 switch (pVCpu->iem.s.enmEffOpSize)
11346 {
11347 case IEMMODE_16BIT:
11348 IEM_MC_BEGIN(3, 3);
11349 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11350 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11351 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11352 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11354
11355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11356 IEMOP_HLP_DONE_DECODING();
11357 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11358 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11359 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11360 IEM_MC_FETCH_EFLAGS(EFlags);
11361 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11362 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11363 else
11364 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11365
11366 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
11367 IEM_MC_COMMIT_EFLAGS(EFlags);
11368 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11369 IEM_MC_ADVANCE_RIP_AND_FINISH();
11370 IEM_MC_END();
11371 break;
11372
11373 case IEMMODE_32BIT:
11374 IEM_MC_BEGIN(3, 3);
11375 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11376 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11377 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11378 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11380
11381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11382 IEMOP_HLP_DONE_DECODING();
11383 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11384 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11385 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11386 IEM_MC_FETCH_EFLAGS(EFlags);
11387 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11388 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11389 else
11390 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11391
11392 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
11393 IEM_MC_COMMIT_EFLAGS(EFlags);
11394 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11395 IEM_MC_ADVANCE_RIP_AND_FINISH();
11396 IEM_MC_END();
11397 break;
11398
11399 case IEMMODE_64BIT:
11400 IEM_MC_BEGIN(3, 3);
11401 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11402 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11403 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11404 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11406
11407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11408 IEMOP_HLP_DONE_DECODING();
11409 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11410 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11411 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11412 IEM_MC_FETCH_EFLAGS(EFlags);
11413 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11414 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11415 else
11416 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11417
11418 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
11419 IEM_MC_COMMIT_EFLAGS(EFlags);
11420 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11421 IEM_MC_ADVANCE_RIP_AND_FINISH();
11422 IEM_MC_END();
11423 break;
11424
11425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11426 }
11427 }
11428}
11429
11430
11431/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11432FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11433{
11434 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11435
11436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11437 if (IEM_IS_MODRM_REG_MODE(bRm))
11438 {
11439 /*
11440 * XMM, XMM.
11441 */
11442 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11443 IEM_MC_BEGIN(4, 2);
11444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11445 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11446 IEM_MC_LOCAL(X86XMMREG, Dst);
11447 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11448 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11449 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11450 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11451 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11452 IEM_MC_PREPARE_SSE_USAGE();
11453 IEM_MC_REF_MXCSR(pfMxcsr);
11454 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11455 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11456 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11457 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11458 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11459 } IEM_MC_ELSE() {
11460 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11461 } IEM_MC_ENDIF();
11462
11463 IEM_MC_ADVANCE_RIP_AND_FINISH();
11464 IEM_MC_END();
11465 }
11466 else
11467 {
11468 /*
11469 * XMM, [mem128].
11470 */
11471 IEM_MC_BEGIN(4, 3);
11472 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11473 IEM_MC_LOCAL(X86XMMREG, Dst);
11474 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11475 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11476 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11478
11479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11480 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11481 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11483 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11484 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11485
11486 IEM_MC_PREPARE_SSE_USAGE();
11487 IEM_MC_REF_MXCSR(pfMxcsr);
11488 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11489 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11490 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11491 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11492 } IEM_MC_ELSE() {
11493 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11494 } IEM_MC_ENDIF();
11495
11496 IEM_MC_ADVANCE_RIP_AND_FINISH();
11497 IEM_MC_END();
11498 }
11499}
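/* The imm8 of these 0xc2 compare forms selects the predicate; bits 2:0
 * encode 0=EQ, 1=LT, 2=LE, 3=UNORD, 4=NEQ, 5=NLT, 6=NLE, 7=ORD, and each
 * result element is set to all ones when the predicate holds and all zeroes
 * otherwise, e.g.:
 *      cmpps xmm1, xmm2, 1     ; xmm1.au32[n] = xmm1.r32[n] < xmm2.r32[n] ? 0xffffffff : 0
 */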
11500
11501
11502/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11503FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11504{
11505 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11506
11507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11508 if (IEM_IS_MODRM_REG_MODE(bRm))
11509 {
11510 /*
11511 * XMM, XMM.
11512 */
11513 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11514 IEM_MC_BEGIN(4, 2);
11515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11516 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11517 IEM_MC_LOCAL(X86XMMREG, Dst);
11518 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11519 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11520 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11521 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11522 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11523 IEM_MC_PREPARE_SSE_USAGE();
11524 IEM_MC_REF_MXCSR(pfMxcsr);
11525 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11526 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11527 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11528 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11529 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11530 } IEM_MC_ELSE() {
11531 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11532 } IEM_MC_ENDIF();
11533
11534 IEM_MC_ADVANCE_RIP_AND_FINISH();
11535 IEM_MC_END();
11536 }
11537 else
11538 {
11539 /*
11540 * XMM, [mem128].
11541 */
11542 IEM_MC_BEGIN(4, 3);
11543 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11544 IEM_MC_LOCAL(X86XMMREG, Dst);
11545 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11546 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11547 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11549
11550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11551 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11552 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11554 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11555 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11556
11557 IEM_MC_PREPARE_SSE_USAGE();
11558 IEM_MC_REF_MXCSR(pfMxcsr);
11559 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11560 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11561 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11562 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11563 } IEM_MC_ELSE() {
11564 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11565 } IEM_MC_ENDIF();
11566
11567 IEM_MC_ADVANCE_RIP_AND_FINISH();
11568 IEM_MC_END();
11569 }
11570}
11571
11572
11573/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11574FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11575{
11576 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11577
11578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11579 if (IEM_IS_MODRM_REG_MODE(bRm))
11580 {
11581 /*
11582 * XMM32, XMM32.
11583 */
11584 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11585 IEM_MC_BEGIN(4, 2);
11586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11587 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11588 IEM_MC_LOCAL(X86XMMREG, Dst);
11589 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11590 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11591 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11592 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11593 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11594 IEM_MC_PREPARE_SSE_USAGE();
11595 IEM_MC_REF_MXCSR(pfMxcsr);
11596 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11597 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11598 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11599 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11600 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11601 } IEM_MC_ELSE() {
11602 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11603 } IEM_MC_ENDIF();
11604
11605 IEM_MC_ADVANCE_RIP_AND_FINISH();
11606 IEM_MC_END();
11607 }
11608 else
11609 {
11610 /*
11611 * XMM32, [mem32].
11612 */
11613 IEM_MC_BEGIN(4, 3);
11614 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11615 IEM_MC_LOCAL(X86XMMREG, Dst);
11616 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11617 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11618 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11620
11621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11622 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11623 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11625 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11626 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11627
11628 IEM_MC_PREPARE_SSE_USAGE();
11629 IEM_MC_REF_MXCSR(pfMxcsr);
11630 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11631 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11632 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11633 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11634 } IEM_MC_ELSE() {
11635 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11636 } IEM_MC_ENDIF();
11637
11638 IEM_MC_ADVANCE_RIP_AND_FINISH();
11639 IEM_MC_END();
11640 }
11641}
11642
11643
11644/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11645FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11646{
11647 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11648
11649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11650 if (IEM_IS_MODRM_REG_MODE(bRm))
11651 {
11652 /*
11653 * XMM64, XMM64.
11654 */
11655 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11656 IEM_MC_BEGIN(4, 2);
11657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11658 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11659 IEM_MC_LOCAL(X86XMMREG, Dst);
11660 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11661 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11662 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11663 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11665 IEM_MC_PREPARE_SSE_USAGE();
11666 IEM_MC_REF_MXCSR(pfMxcsr);
11667 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11668 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11669 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11670 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11671 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11672 } IEM_MC_ELSE() {
11673 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11674 } IEM_MC_ENDIF();
11675
11676 IEM_MC_ADVANCE_RIP_AND_FINISH();
11677 IEM_MC_END();
11678 }
11679 else
11680 {
11681 /*
11682 * XMM64, [mem64].
11683 */
11684 IEM_MC_BEGIN(4, 3);
11685 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11686 IEM_MC_LOCAL(X86XMMREG, Dst);
11687 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11688 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11689 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11691
11692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11693 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11694 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11696 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11697 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11698
11699 IEM_MC_PREPARE_SSE_USAGE();
11700 IEM_MC_REF_MXCSR(pfMxcsr);
11701 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11702 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11703 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11704 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11705 } IEM_MC_ELSE() {
11706 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11707 } IEM_MC_ENDIF();
11708
11709 IEM_MC_ADVANCE_RIP_AND_FINISH();
11710 IEM_MC_END();
11711 }
11712}
11713
11714
11715/** Opcode 0x0f 0xc3. */
11716FNIEMOP_DEF(iemOp_movnti_My_Gy)
11717{
11718 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11719
11720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11721
11722 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11723 if (IEM_IS_MODRM_MEM_MODE(bRm))
11724 {
11725 switch (pVCpu->iem.s.enmEffOpSize)
11726 {
11727 case IEMMODE_32BIT:
11728 IEM_MC_BEGIN(0, 2);
11729 IEM_MC_LOCAL(uint32_t, u32Value);
11730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11731
11732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11734
11735 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11736 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11737 IEM_MC_ADVANCE_RIP_AND_FINISH();
11738 IEM_MC_END();
11739 break;
11740
11741 case IEMMODE_64BIT:
11742 IEM_MC_BEGIN(0, 2);
11743 IEM_MC_LOCAL(uint64_t, u64Value);
11744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11745
11746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11748
11749 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11750 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11751 IEM_MC_ADVANCE_RIP_AND_FINISH();
11752 IEM_MC_END();
11753 break;
11754
11755 case IEMMODE_16BIT:
11756 /** @todo check this form. */
11757 IEMOP_RAISE_INVALID_OPCODE_RET();
11758
11759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11760 }
11761 }
11762 else
11763 IEMOP_RAISE_INVALID_OPCODE_RET();
11764}
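/* The non-temporal hint of movnti only affects the cache usage of the store,
 * so emulating it as an ordinary 32/64-bit store as done above is
 * architecturally sound. */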
11765
11766
11767/* Opcode 0x66 0x0f 0xc3 - invalid */
11768/* Opcode 0xf3 0x0f 0xc3 - invalid */
11769/* Opcode 0xf2 0x0f 0xc3 - invalid */
11770
11771
11772/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11773FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11774{
11775 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
11776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11777 if (IEM_IS_MODRM_REG_MODE(bRm))
11778 {
11779 /*
11780 * Register, register.
11781 */
11782 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11783 IEM_MC_BEGIN(3, 0);
11784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
11785 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11786 IEM_MC_ARG(uint16_t, u16Src, 1);
11787 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11788 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
11789 IEM_MC_PREPARE_FPU_USAGE();
11790 IEM_MC_FPU_TO_MMX_MODE();
11791 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11792 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11793 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11794 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11795 IEM_MC_ADVANCE_RIP_AND_FINISH();
11796 IEM_MC_END();
11797 }
11798 else
11799 {
11800 /*
11801 * Register, memory.
11802 */
11803 IEM_MC_BEGIN(3, 1);
11804 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11805 IEM_MC_ARG(uint16_t, u16Src, 1);
11806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11807
11808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11809 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11810 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
11812 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
11813 IEM_MC_PREPARE_FPU_USAGE();
11814 IEM_MC_FPU_TO_MMX_MODE();
11815
11816 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11817 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
11818 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
11819 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11820 IEM_MC_ADVANCE_RIP_AND_FINISH();
11821 IEM_MC_END();
11822 }
11823}
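/* pinsrw replaces a single 16-bit lane selected by the immediate, roughly:
 *      ((uint16_t *)pu64Dst)[bImm & 3] = u16Src;   // 64-bit MMX form
 * while the 128-bit form below masks the lane index with 7 instead. */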
11824
11825
11826/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11827FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11828{
11829 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11831 if (IEM_IS_MODRM_REG_MODE(bRm))
11832 {
11833 /*
11834 * Register, register.
11835 */
11836 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11837 IEM_MC_BEGIN(3, 0);
11838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11839 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11840 IEM_MC_ARG(uint16_t, u16Src, 1);
11841 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11842 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11843 IEM_MC_PREPARE_SSE_USAGE();
11844 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11845 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11846 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11847 IEM_MC_ADVANCE_RIP_AND_FINISH();
11848 IEM_MC_END();
11849 }
11850 else
11851 {
11852 /*
11853 * Register, memory.
11854 */
11855 IEM_MC_BEGIN(3, 2);
11856 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11857 IEM_MC_ARG(uint16_t, u16Src, 1);
11858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11859
11860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11861 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11862 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11865 IEM_MC_PREPARE_SSE_USAGE();
11866
11867 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11868 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11869 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
11870 IEM_MC_ADVANCE_RIP_AND_FINISH();
11871 IEM_MC_END();
11872 }
11873}
11874
11875
11876/* Opcode 0xf3 0x0f 0xc4 - invalid */
11877/* Opcode 0xf2 0x0f 0xc4 - invalid */
11878
11879
11880/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11881FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11882{
11883 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
11884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11885 if (IEM_IS_MODRM_REG_MODE(bRm))
11886 {
11887 /*
11888 * Greg32, MMX, imm8.
11889 */
11890 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11891 IEM_MC_BEGIN(3, 1);
11892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
11893 IEM_MC_LOCAL(uint16_t, u16Dst);
11894 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11895 IEM_MC_ARG(uint64_t, u64Src, 1);
11896 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11897 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
11898 IEM_MC_PREPARE_FPU_USAGE();
11899 IEM_MC_FPU_TO_MMX_MODE();
11900 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
11901 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
11902 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11903 IEM_MC_ADVANCE_RIP_AND_FINISH();
11904 IEM_MC_END();
11905 }
11906 /* No memory operand. */
11907 else
11908 IEMOP_RAISE_INVALID_OPCODE_RET();
11909}
11910
11911
11912/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11913FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11914{
11915 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11917 if (IEM_IS_MODRM_REG_MODE(bRm))
11918 {
11919 /*
11920 * Greg32, XMM, imm8.
11921 */
11922 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11923 IEM_MC_BEGIN(3, 1);
11924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11925 IEM_MC_LOCAL(uint16_t, u16Dst);
11926 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11927 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11928 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11929 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11930 IEM_MC_PREPARE_SSE_USAGE();
11931 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11932 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
11933 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11934 IEM_MC_ADVANCE_RIP_AND_FINISH();
11935 IEM_MC_END();
11936 }
11937 /* No memory operand. */
11938 else
11939 IEMOP_RAISE_INVALID_OPCODE_RET();
11940}
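/* pextrw is the inverse operation: it zero extends the selected word into
 * the 32-bit destination register, which the IEM_MC_STORE_GREG_U32 of the
 * uint16_t result above gives us for free. */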
11941
11942
11943/* Opcode 0xf3 0x0f 0xc5 - invalid */
11944/* Opcode 0xf2 0x0f 0xc5 - invalid */
11945
11946
11947/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
11948FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11949{
11950 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11952 if (IEM_IS_MODRM_REG_MODE(bRm))
11953 {
11954 /*
11955 * XMM, XMM, imm8.
11956 */
11957 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11958 IEM_MC_BEGIN(3, 0);
11959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11960 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11961 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11962 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11963 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11964 IEM_MC_PREPARE_SSE_USAGE();
11965 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11966 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11967 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
11968 IEM_MC_ADVANCE_RIP_AND_FINISH();
11969 IEM_MC_END();
11970 }
11971 else
11972 {
11973 /*
11974 * XMM, [mem128], imm8.
11975 */
11976 IEM_MC_BEGIN(3, 2);
11977 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11978 IEM_MC_LOCAL(RTUINT128U, uSrc);
11979 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11981
11982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11983 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11984 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11986 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11987 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11988
11989 IEM_MC_PREPARE_SSE_USAGE();
11990 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11991 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
11992
11993 IEM_MC_ADVANCE_RIP_AND_FINISH();
11994 IEM_MC_END();
11995 }
11996}
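/* shufps takes the low two result dwords from the destination and the high
 * two from the source, with two imm8 bits selecting each lane:
 *      Dst[0] = DstIn[(bImm >> 0) & 3];
 *      Dst[1] = DstIn[(bImm >> 2) & 3];
 *      Dst[2] = Src[(bImm >> 4) & 3];
 *      Dst[3] = Src[(bImm >> 6) & 3];
 */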
11997
11998
11999/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12000FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12001{
12002 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12004 if (IEM_IS_MODRM_REG_MODE(bRm))
12005 {
12006 /*
12007 * XMM, XMM, imm8.
12008 */
12009 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12010 IEM_MC_BEGIN(3, 0);
12011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12012 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12013 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12014 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12015 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12016 IEM_MC_PREPARE_SSE_USAGE();
12017 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12018 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12019 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12020 IEM_MC_ADVANCE_RIP_AND_FINISH();
12021 IEM_MC_END();
12022 }
12023 else
12024 {
12025 /*
12026 * XMM, [mem128], imm8.
12027 */
12028 IEM_MC_BEGIN(3, 2);
12029 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12030 IEM_MC_LOCAL(RTUINT128U, uSrc);
12031 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12033
12034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12035 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12036 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12038 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12039 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12040
12041 IEM_MC_PREPARE_SSE_USAGE();
12042 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12043 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12044
12045 IEM_MC_ADVANCE_RIP_AND_FINISH();
12046 IEM_MC_END();
12047 }
12048}
12049
12050
12051/* Opcode 0xf3 0x0f 0xc6 - invalid */
12052/* Opcode 0xf2 0x0f 0xc6 - invalid */
12053
12054
12055/** Opcode 0x0f 0xc7 !11/1. */
12056FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12057{
12058 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12059
12060 IEM_MC_BEGIN(4, 3);
12061 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12062 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12063 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12064 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12065 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12066 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12068
12069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12070 IEMOP_HLP_DONE_DECODING();
12071 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12072
12073 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12074 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12075 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12076
12077 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12078 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12079 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12080
12081 IEM_MC_FETCH_EFLAGS(EFlags);
12082 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12083 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12084 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12085 else
12086 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12087
12088 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
12089 IEM_MC_COMMIT_EFLAGS(EFlags);
12090 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12091 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12092 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12093 } IEM_MC_ENDIF();
12094 IEM_MC_ADVANCE_RIP_AND_FINISH();
12095
12096 IEM_MC_END();
12097}
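
/* Note! For reference, the (non-atomic) semantics the cmpxchg8b workers
   implement, as a minimal sketch with an illustrative name; the locked
   variant additionally guarantees atomicity:

       static void cmpxchg8bRef(uint64_t *pu64MemDst, RTUINT64U *pu64EaxEdx,
                                RTUINT64U const *pu64EbxEcx, uint32_t *pEFlags)
       {
           if (*pu64MemDst == pu64EaxEdx->u)
           {
               *pu64MemDst = pu64EbxEcx->u;      // equal: store ECX:EBX, set ZF
               *pEFlags   |= X86_EFL_ZF;
           }
           else
           {
               pu64EaxEdx->u = *pu64MemDst;      // not equal: load EDX:EAX, clear ZF
               *pEFlags     &= ~X86_EFL_ZF;
           }
       }

   This is why the tail code above only writes EAX/EDX back when ZF is clear. */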
12098
12099
12100/** Opcode REX.W 0x0f 0xc7 !11/1. */
12101FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12102{
12103 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12104 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12105 {
12106 IEM_MC_BEGIN(4, 3);
12107 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12108 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12109 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12110 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12111 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12112 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12114
12115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12116 IEMOP_HLP_DONE_DECODING();
12117 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12118 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12119
12120 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12121 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12122 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12123
12124 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12125 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12126 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12127
12128 IEM_MC_FETCH_EFLAGS(EFlags);
12129
12130#ifdef RT_ARCH_AMD64 /* some code duplication here because IEMAllInstPython.py cannot parse if/else/#if spaghetti. */
12131 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12132 {
12133 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12134 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12136 else
12137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12138 }
12139 else
12140 { /* (see comments in #else case below) */
12141 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12142 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12143 else
12144 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12145 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12146 }
12147
12148#elif defined(RT_ARCH_ARM64)
12149 /** @todo may require fallback for unaligned accesses... */
12150 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12151 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12152 else
12153 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12154
12155#else
12156    /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
12157       accesses and is not at all atomic, which works fine in a UNI CPU guest
12158       configuration (ignoring DMA). If guest SMP is active we have no choice
12159       but to use a rendezvous callback here. Sigh. */
12160 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12161 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12162 else
12163 {
12164 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12165 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12166 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12167 }
12168#endif
12169
12170 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12171 IEM_MC_COMMIT_EFLAGS(EFlags);
12172 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12173 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12174 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12175 } IEM_MC_ENDIF();
12176 IEM_MC_ADVANCE_RIP_AND_FINISH();
12177
12178 IEM_MC_END();
12179 }
12180 Log(("cmpxchg16b -> #UD\n"));
12181 IEMOP_RAISE_INVALID_OPCODE_RET();
12182}
12183
12184FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12185{
12186 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12187 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12188 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12189}
12190
12191
12192/** Opcode 0x0f 0xc7 11/6. */
12193FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12194{
12195 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12196 IEMOP_RAISE_INVALID_OPCODE_RET();
12197
12198 if (IEM_IS_MODRM_REG_MODE(bRm))
12199 {
12200 /* register destination. */
12201 switch (pVCpu->iem.s.enmEffOpSize)
12202 {
12203 case IEMMODE_16BIT:
12204 IEM_MC_BEGIN(2, 0);
12205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12206 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12207 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12208
12209 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12210 IEM_MC_REF_EFLAGS(pEFlags);
12211 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
12212 pu16Dst, pEFlags);
12213
12214 IEM_MC_ADVANCE_RIP_AND_FINISH();
12215 IEM_MC_END();
12216 break;
12217
12218 case IEMMODE_32BIT:
12219 IEM_MC_BEGIN(2, 0);
12220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12221 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12222 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12223
12224 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12225 IEM_MC_REF_EFLAGS(pEFlags);
12226 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
12227 pu32Dst, pEFlags);
12228
12229 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12230 IEM_MC_ADVANCE_RIP_AND_FINISH();
12231 IEM_MC_END();
12232 break;
12233
12234 case IEMMODE_64BIT:
12235 IEM_MC_BEGIN(2, 0);
12236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12237 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12238 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12239
12240 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12241 IEM_MC_REF_EFLAGS(pEFlags);
12242 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
12243 pu64Dst, pEFlags);
12244
12245 IEM_MC_ADVANCE_RIP_AND_FINISH();
12246 IEM_MC_END();
12247 break;
12248
12249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12250 }
12251 }
12252    /* Memory destination is invalid; the instruction is register only. */
12253 else
12254 IEMOP_RAISE_INVALID_OPCODE_RET();
12255}
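
/* Note! The worker signals success via CF in the guest EFLAGS: CF=1 means the
   destination holds a valid random value, CF=0 means none was available.
   Guest code is therefore expected to retry, along the lines of this
   illustrative GCC/Clang sketch (guest side, not part of the emulation):

       static int rdrand64Retry(uint64_t *puValue)
       {
           for (unsigned cTries = 0; cTries < 10; cTries++)
           {
               unsigned char fOk;
               __asm__ __volatile__("rdrand %0" : "=r" (*puValue), "=@ccc" (fOk));
               if (fOk)
                   return 0;     // CF=1: *puValue is valid
           }
           return -1;            // CF stayed 0: no entropy available
       }
*/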
12256
12257/** Opcode 0x0f 0xc7 !11/6. */
12258#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12259FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12260{
12261 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12262 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12263 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12264 IEM_MC_BEGIN(2, 0);
12265 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12266 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12268 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12269 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12270 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12271 IEM_MC_END();
12272}
12273#else
12274FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12275#endif
12276
12277/** Opcode 0x66 0x0f 0xc7 !11/6. */
12278#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12279FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12280{
12281 IEMOP_MNEMONIC(vmclear, "vmclear");
12282 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12283 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12284 IEM_MC_BEGIN(2, 0);
12285 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12286 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12288 IEMOP_HLP_DONE_DECODING();
12289 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12290 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12291 IEM_MC_END();
12292}
12293#else
12294FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12295#endif
12296
12297/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12298#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12299FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12300{
12301 IEMOP_MNEMONIC(vmxon, "vmxon");
12302 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12303 IEM_MC_BEGIN(2, 0);
12304 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12305 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12307 IEMOP_HLP_DONE_DECODING();
12308 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12309 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12310 IEM_MC_END();
12311}
12312#else
12313FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12314#endif
12315
12316/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12317#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12318FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12319{
12320 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12321 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12322 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12323 IEM_MC_BEGIN(2, 0);
12324 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12325 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12327 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12328 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12329 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12330 IEM_MC_END();
12331}
12332#else
12333FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12334#endif
12335
12336/** Opcode 0x0f 0xc7 11/7. */
12337FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12338{
12339 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12340 IEMOP_RAISE_INVALID_OPCODE_RET();
12341
12342 if (IEM_IS_MODRM_REG_MODE(bRm))
12343 {
12344 /* register destination. */
12345 switch (pVCpu->iem.s.enmEffOpSize)
12346 {
12347 case IEMMODE_16BIT:
12348 IEM_MC_BEGIN(2, 0);
12349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12350 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12351 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12352
12353 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12354 IEM_MC_REF_EFLAGS(pEFlags);
12355 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
12356 pu16Dst, pEFlags);
12357
12358 IEM_MC_ADVANCE_RIP_AND_FINISH();
12359 IEM_MC_END();
12360 break;
12361
12362 case IEMMODE_32BIT:
12363 IEM_MC_BEGIN(2, 0);
12364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12365 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12366 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12367
12368 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12369 IEM_MC_REF_EFLAGS(pEFlags);
12370 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
12371 pu32Dst, pEFlags);
12372
12373 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12374 IEM_MC_ADVANCE_RIP_AND_FINISH();
12375 IEM_MC_END();
12376 break;
12377
12378 case IEMMODE_64BIT:
12379 IEM_MC_BEGIN(2, 0);
12380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12381 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12382 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12383
12384 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12385 IEM_MC_REF_EFLAGS(pEFlags);
12386 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
12387 pu64Dst, pEFlags);
12388
12389 IEM_MC_ADVANCE_RIP_AND_FINISH();
12390 IEM_MC_END();
12391 break;
12392
12393 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12394 }
12395 }
12396    /* Memory destination is invalid; the instruction is register only. */
12397 else
12398 IEMOP_RAISE_INVALID_OPCODE_RET();
12399}
12400
12401/**
12402 * Group 9 jump table for register variant.
12403 */
12404IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12405{ /* pfx: none, 066h, 0f3h, 0f2h */
12406 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12407 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12408 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12409 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12410 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12411 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12412 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12413 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12414};
12415AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12416
12417
12418/**
12419 * Group 9 jump table for memory variant.
12420 */
12421IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12422{ /* pfx: none, 066h, 0f3h, 0f2h */
12423 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12424 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12425 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12426 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12427 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12428 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12429 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12430 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12431};
12432AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12433
12434
12435/** Opcode 0x0f 0xc7. */
12436FNIEMOP_DEF(iemOp_Grp9)
12437{
12438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12439 if (IEM_IS_MODRM_REG_MODE(bRm))
12440 /* register, register */
12441 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12442 + pVCpu->iem.s.idxPrefix], bRm);
12443 /* memory, register */
12444 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12445 + pVCpu->iem.s.idxPrefix], bRm);
12446}
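
/* Note! Both Group 9 tables are indexed by the ModR/M reg field scaled by the
   four prefix columns. A worked example, assuming idxPrefix follows the
   column order none/066h/0f3h/0f2h noted in the table comments:

       // bRm = 0xf7: mod=3 (register form), reg=6, rm=7.  With a 66h prefix
       // idxPrefix is 1, so this picks g_apfnGroup9RegReg[6 * 4 + 1], i.e.
       // the 066h rdrand entry above.
       unsigned const idx = ((bRm >> 3) & 7) * 4 + idxPrefix;
*/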
12447
12448
12449/**
12450 * Common 'bswap register' helper.
12451 */
12452FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12453{
12454 switch (pVCpu->iem.s.enmEffOpSize)
12455 {
12456 case IEMMODE_16BIT:
12457 IEM_MC_BEGIN(1, 0);
12458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12459 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12460 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12461 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12462 IEM_MC_ADVANCE_RIP_AND_FINISH();
12463 IEM_MC_END();
12464 break;
12465
12466 case IEMMODE_32BIT:
12467 IEM_MC_BEGIN(1, 0);
12468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12469 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12470 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12471 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12472 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12473 IEM_MC_ADVANCE_RIP_AND_FINISH();
12474 IEM_MC_END();
12475 break;
12476
12477 case IEMMODE_64BIT:
12478 IEM_MC_BEGIN(1, 0);
12479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12480 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12481 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12482 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12483 IEM_MC_ADVANCE_RIP_AND_FINISH();
12484 IEM_MC_END();
12485 break;
12486
12487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12488 }
12489}
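
/* Note! For reference, a byte reversal sketch equivalent to what
   iemAImpl_bswap_u32 does to the register value (illustrative, not the
   actual assembly worker):

       static uint32_t bswapU32Ref(uint32_t u32)
       {
           return ((u32 & UINT32_C(0x000000ff)) << 24)
                | ((u32 & UINT32_C(0x0000ff00)) <<  8)
                | ((u32 & UINT32_C(0x00ff0000)) >>  8)
                | ((u32 & UINT32_C(0xff000000)) >> 24);
       }

   The 16-bit operand size form is documented as undefined and real CPUs are
   typically observed to zero the low word; note how the 16-bit case above
   deliberately takes a 32-bit reference so the high dword is left alone. */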
12490
12491
12492/** Opcode 0x0f 0xc8. */
12493FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12494{
12495 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12496    /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12497             prefix, but REX.B appears to be the correct prefix. For a parallel
12498             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12499 IEMOP_HLP_MIN_486();
12500 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12501}
12502
12503
12504/** Opcode 0x0f 0xc9. */
12505FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12506{
12507 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12508 IEMOP_HLP_MIN_486();
12509 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12510}
12511
12512
12513/** Opcode 0x0f 0xca. */
12514FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12515{
12516    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12517 IEMOP_HLP_MIN_486();
12518 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12519}
12520
12521
12522/** Opcode 0x0f 0xcb. */
12523FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12524{
12525    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12526 IEMOP_HLP_MIN_486();
12527 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12528}
12529
12530
12531/** Opcode 0x0f 0xcc. */
12532FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12533{
12534 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12535 IEMOP_HLP_MIN_486();
12536 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12537}
12538
12539
12540/** Opcode 0x0f 0xcd. */
12541FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12542{
12543 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12544 IEMOP_HLP_MIN_486();
12545 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12546}
12547
12548
12549/** Opcode 0x0f 0xce. */
12550FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12551{
12552 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12553 IEMOP_HLP_MIN_486();
12554 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12555}
12556
12557
12558/** Opcode 0x0f 0xcf. */
12559FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12560{
12561 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12562 IEMOP_HLP_MIN_486();
12563 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12564}
12565
12566
12567/* Opcode 0x0f 0xd0 - invalid */
12568
12569
12570/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12571FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12572{
12573 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12574 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12575}
12576
12577
12578/* Opcode 0xf3 0x0f 0xd0 - invalid */
12579
12580
12581/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12582FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12583{
12584 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12585 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12586}
12587
12588
12589
12590/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12591FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12592{
12593 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12594 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12595}
12596
12597/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12598FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12599{
12600 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12601 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12602}
12603
12604/* Opcode 0xf3 0x0f 0xd1 - invalid */
12605/* Opcode 0xf2 0x0f 0xd1 - invalid */
12606
12607/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12608FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12609{
12610 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12611 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12612}
12613
12614
12615/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12616FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12617{
12618 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12619 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12620}
12621
12622
12623/* Opcode 0xf3 0x0f 0xd2 - invalid */
12624/* Opcode 0xf2 0x0f 0xd2 - invalid */
12625
12626/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12627FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12628{
12629 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12630 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12631}
12632
12633
12634/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12635FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12636{
12637 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12638 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12639}
12640
12641
12642/* Opcode 0xf3 0x0f 0xd3 - invalid */
12643/* Opcode 0xf2 0x0f 0xd3 - invalid */
12644
12645
12646/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12647FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12648{
12649 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12650 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12651}
12652
12653
12654/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12655FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12656{
12657 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12658 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12659}
12660
12661
12662/* Opcode 0xf3 0x0f 0xd4 - invalid */
12663/* Opcode 0xf2 0x0f 0xd4 - invalid */
12664
12665/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12666FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12667{
12668 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12669 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12670}
12671
12672/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12673FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12674{
12675 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12676 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12677}
12678
12679
12680/* Opcode 0xf3 0x0f 0xd5 - invalid */
12681/* Opcode 0xf2 0x0f 0xd5 - invalid */
12682
12683/* Opcode 0x0f 0xd6 - invalid */
12684
12685/**
12686 * @opcode 0xd6
12687 * @oppfx 0x66
12688 * @opcpuid sse2
12689 * @opgroup og_sse2_pcksclr_datamove
12690 * @opxcpttype none
12691 * @optest op1=-1 op2=2 -> op1=2
12692 * @optest op1=0 op2=-42 -> op1=-42
12693 */
12694FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12695{
12696 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12698 if (IEM_IS_MODRM_REG_MODE(bRm))
12699 {
12700 /*
12701 * Register, register.
12702 */
12703 IEM_MC_BEGIN(0, 2);
12704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12705 IEM_MC_LOCAL(uint64_t, uSrc);
12706
12707 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12708 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12709
12710 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12711 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12712
12713 IEM_MC_ADVANCE_RIP_AND_FINISH();
12714 IEM_MC_END();
12715 }
12716 else
12717 {
12718 /*
12719 * Memory, register.
12720 */
12721 IEM_MC_BEGIN(0, 2);
12722 IEM_MC_LOCAL(uint64_t, uSrc);
12723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12724
12725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12727 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12728 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12729
12730 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12731 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12732
12733 IEM_MC_ADVANCE_RIP_AND_FINISH();
12734 IEM_MC_END();
12735 }
12736}
12737
12738
12739/**
12740 * @opcode 0xd6
12741 * @opcodesub 11 mr/reg
12742 * @oppfx f3
12743 * @opcpuid sse2
12744 * @opgroup og_sse2_simdint_datamove
12745 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12746 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12747 */
12748FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12749{
12750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12751 if (IEM_IS_MODRM_REG_MODE(bRm))
12752 {
12753 /*
12754 * Register, register.
12755 */
12756 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12757 IEM_MC_BEGIN(0, 1);
12758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12759 IEM_MC_LOCAL(uint64_t, uSrc);
12760
12761 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12762 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12763 IEM_MC_FPU_TO_MMX_MODE();
12764
12765 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12766 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12767
12768 IEM_MC_ADVANCE_RIP_AND_FINISH();
12769 IEM_MC_END();
12770 }
12771
12772 /**
12773 * @opdone
12774 * @opmnemonic udf30fd6mem
12775 * @opcode 0xd6
12776 * @opcodesub !11 mr/reg
12777 * @oppfx f3
12778 * @opunused intel-modrm
12779 * @opcpuid sse
12780 * @optest ->
12781 */
12782 else
12783 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12784}
12785
12786
12787/**
12788 * @opcode 0xd6
12789 * @opcodesub 11 mr/reg
12790 * @oppfx f2
12791 * @opcpuid sse2
12792 * @opgroup og_sse2_simdint_datamove
12793 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12794 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12795 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12796 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12797 * @optest op1=-42 op2=0xfedcba9876543210
12798 * -> op1=0xfedcba9876543210 ftw=0xff
12799 */
12800FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12801{
12802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12803 if (IEM_IS_MODRM_REG_MODE(bRm))
12804 {
12805 /*
12806 * Register, register.
12807 */
12808 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12809 IEM_MC_BEGIN(0, 1);
12810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12811 IEM_MC_LOCAL(uint64_t, uSrc);
12812
12813 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12814 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12815 IEM_MC_FPU_TO_MMX_MODE();
12816
12817 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
12818 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12819
12820 IEM_MC_ADVANCE_RIP_AND_FINISH();
12821 IEM_MC_END();
12822 }
12823
12824 /**
12825 * @opdone
12826 * @opmnemonic udf20fd6mem
12827 * @opcode 0xd6
12828 * @opcodesub !11 mr/reg
12829 * @oppfx f2
12830 * @opunused intel-modrm
12831 * @opcpuid sse
12832 * @optest ->
12833 */
12834 else
12835 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12836}
12837
12838
12839/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12840FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12841{
12842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12843    /* Docs say register only. */
12844 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12845 {
12846        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12847 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
12848 IEM_MC_BEGIN(2, 0);
12849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12850 IEM_MC_ARG(uint64_t *, puDst, 0);
12851 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12852 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12853 IEM_MC_PREPARE_FPU_USAGE();
12854 IEM_MC_FPU_TO_MMX_MODE();
12855
12856 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12857 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12858 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12859
12860 IEM_MC_ADVANCE_RIP_AND_FINISH();
12861 IEM_MC_END();
12862 }
12863 else
12864 IEMOP_RAISE_INVALID_OPCODE_RET();
12865}
12866
12867
12868/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12869FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12870{
12871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12872    /* Docs say register only. */
12873 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12874 {
12875        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12876 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
12877 IEM_MC_BEGIN(2, 0);
12878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12879 IEM_MC_ARG(uint64_t *, puDst, 0);
12880 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12881 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12882 IEM_MC_PREPARE_SSE_USAGE();
12883 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12884 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12885 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12886 IEM_MC_ADVANCE_RIP_AND_FINISH();
12887 IEM_MC_END();
12888 }
12889 else
12890 IEMOP_RAISE_INVALID_OPCODE_RET();
12891}
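
/* Note! Both pmovmskb forms gather the most significant bit of each packed
   byte into the low bits of the destination GPR. A minimal reference sketch
   for the 128-bit worker (illustrative only):

       static void pmovmskbU128Ref(uint64_t *puDst, PCRTUINT128U puSrc)
       {
           uint64_t fMask = 0;
           for (unsigned iByte = 0; iByte < 16; iByte++)
               fMask |= (uint64_t)(puSrc->au8[iByte] >> 7) << iByte; // sign bit
           *puDst = fMask;                     // bits 16 thru 63 end up zero
       }

   The MMX form works the same way on 8 bytes. */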
12892
12893
12894/* Opcode 0xf3 0x0f 0xd7 - invalid */
12895/* Opcode 0xf2 0x0f 0xd7 - invalid */
12896
12897
12898/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12899FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12900{
12901 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12902 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12903}
12904
12905
12906/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12907FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12908{
12909 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12910 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12911}
12912
12913
12914/* Opcode 0xf3 0x0f 0xd8 - invalid */
12915/* Opcode 0xf2 0x0f 0xd8 - invalid */
12916
12917/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12918FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12919{
12920 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12921 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12922}
12923
12924
12925/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12926FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12927{
12928 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12929 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12930}
12931
12932
12933/* Opcode 0xf3 0x0f 0xd9 - invalid */
12934/* Opcode 0xf2 0x0f 0xd9 - invalid */
12935
12936/** Opcode 0x0f 0xda - pminub Pq, Qq */
12937FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12938{
12939 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12940 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12941}
12942
12943
12944/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12945FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12946{
12947 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12948 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12949}
12950
12951/* Opcode 0xf3 0x0f 0xda - invalid */
12952/* Opcode 0xf2 0x0f 0xda - invalid */
12953
12954/** Opcode 0x0f 0xdb - pand Pq, Qq */
12955FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12956{
12957 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12958 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12959}
12960
12961
12962/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12963FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12964{
12965 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12966 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12967}
12968
12969
12970/* Opcode 0xf3 0x0f 0xdb - invalid */
12971/* Opcode 0xf2 0x0f 0xdb - invalid */
12972
12973/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12974FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12975{
12976 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12977 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12978}
12979
12980
12981/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12982FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12983{
12984 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12985 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12986}
12987
12988
12989/* Opcode 0xf3 0x0f 0xdc - invalid */
12990/* Opcode 0xf2 0x0f 0xdc - invalid */
12991
12992/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12993FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12994{
12995 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12996 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12997}
12998
12999
13000/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13001FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13002{
13003 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13004 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13005}
13006
13007
13008/* Opcode 0xf3 0x0f 0xdd - invalid */
13009/* Opcode 0xf2 0x0f 0xdd - invalid */
13010
13011/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13012FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13013{
13014 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13015 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13016}
13017
13018
13019/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13020FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13021{
13022 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13023 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13024}
13025
13026/* Opcode 0xf3 0x0f 0xde - invalid */
13027/* Opcode 0xf2 0x0f 0xde - invalid */
13028
13029
13030/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13031FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13032{
13033 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13034 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13035}
13036
13037
13038/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13039FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13040{
13041 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13042 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13043}
13044
13045
13046/* Opcode 0xf3 0x0f 0xdf - invalid */
13047/* Opcode 0xf2 0x0f 0xdf - invalid */
13048
13049/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13050FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13051{
13052 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13053 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13054}
13055
13056
13057/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13058FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13059{
13060 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13061 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13062}
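
/* Note! The packed averages round upwards, i.e. each lane computes
   (a + b + 1) / 2 in a wider intermediate. Per-byte reference sketch
   (illustrative only):

       static uint8_t pavgbLaneRef(uint8_t uSrc1, uint8_t uSrc2)
       {
           return (uint8_t)(((uint16_t)uSrc1 + uSrc2 + 1) >> 1); // 9-bit sum
       }

   pavgw below does the same with 16-bit lanes and a 17-bit intermediate. */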
13063
13064
13065/* Opcode 0xf3 0x0f 0xe0 - invalid */
13066/* Opcode 0xf2 0x0f 0xe0 - invalid */
13067
13068/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13069FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13070{
13071 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13072 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13073}
13074
13075
13076/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13077FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13078{
13079 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13080 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13081}
13082
13083
13084/* Opcode 0xf3 0x0f 0xe1 - invalid */
13085/* Opcode 0xf2 0x0f 0xe1 - invalid */
13086
13087/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13088FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13089{
13090 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13091 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13092}
13093
13094
13095/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13096FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13097{
13098 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13099 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13100}
13101
13102
13103/* Opcode 0xf3 0x0f 0xe2 - invalid */
13104/* Opcode 0xf2 0x0f 0xe2 - invalid */
13105
13106/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13107FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13108{
13109 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13110 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13111}
13112
13113
13114/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13115FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13116{
13117 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13118 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13119}
13120
13121
13122/* Opcode 0xf3 0x0f 0xe3 - invalid */
13123/* Opcode 0xf2 0x0f 0xe3 - invalid */
13124
13125/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13126FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13127{
13128 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13129 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13130}
13131
13132
13133/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13134FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13135{
13136 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13137 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13138}
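
/* Note! pmulhuw keeps the high half of the unsigned 16x16 bit product per
   lane, while pmulhw below does the same for the signed product. Per-lane
   sketch (illustrative only):

       static uint16_t pmulhuwLaneRef(uint16_t uSrc1, uint16_t uSrc2)
       {
           return (uint16_t)(((uint32_t)uSrc1 * uSrc2) >> 16);
       }
*/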
13139
13140
13141/* Opcode 0xf3 0x0f 0xe4 - invalid */
13142/* Opcode 0xf2 0x0f 0xe4 - invalid */
13143
13144/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13145FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13146{
13147 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13148 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13149}
13150
13151
13152/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13153FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13154{
13155 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13156 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13157}
13158
13159
13160/* Opcode 0xf3 0x0f 0xe5 - invalid */
13161/* Opcode 0xf2 0x0f 0xe5 - invalid */
13162/* Opcode 0x0f 0xe6 - invalid */
13163
13164
13165/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13166FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13167{
13168 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13169 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13170}
13171
13172
13173/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13174FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13175{
13176 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13177 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13178}
13179
13180
13181/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13182FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13183{
13184 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13185 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13186}
13187
13188
13189/**
13190 * @opcode 0xe7
13191 * @opcodesub !11 mr/reg
13192 * @oppfx none
13193 * @opcpuid sse
13194 * @opgroup og_sse1_cachect
13195 * @opxcpttype none
13196 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13197 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13198 */
13199FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13200{
13201 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13203 if (IEM_IS_MODRM_MEM_MODE(bRm))
13204 {
13205 /* Register, memory. */
13206 IEM_MC_BEGIN(0, 2);
13207 IEM_MC_LOCAL(uint64_t, uSrc);
13208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13209
13210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13212 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13213 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13214 IEM_MC_FPU_TO_MMX_MODE();
13215
13216 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13217 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13218
13219 IEM_MC_ADVANCE_RIP_AND_FINISH();
13220 IEM_MC_END();
13221 }
13222 /**
13223 * @opdone
13224 * @opmnemonic ud0fe7reg
13225 * @opcode 0xe7
13226 * @opcodesub 11 mr/reg
13227 * @oppfx none
13228 * @opunused immediate
13229 * @opcpuid sse
13230 * @optest ->
13231 */
13232 else
13233 IEMOP_RAISE_INVALID_OPCODE_RET();
13234}
13235
13236/**
13237 * @opcode 0xe7
13238 * @opcodesub !11 mr/reg
13239 * @oppfx 0x66
13240 * @opcpuid sse2
13241 * @opgroup og_sse2_cachect
13242 * @opxcpttype 1
13243 * @optest op1=-1 op2=2 -> op1=2
13244 * @optest op1=0 op2=-42 -> op1=-42
13245 */
13246FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13247{
13248 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13250 if (IEM_IS_MODRM_MEM_MODE(bRm))
13251 {
13252 /* Register, memory. */
13253 IEM_MC_BEGIN(0, 2);
13254 IEM_MC_LOCAL(RTUINT128U, uSrc);
13255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13256
13257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13259 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13260 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13261
13262 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13263 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13264
13265 IEM_MC_ADVANCE_RIP_AND_FINISH();
13266 IEM_MC_END();
13267 }
13268
13269 /**
13270 * @opdone
13271 * @opmnemonic ud660fe7reg
13272 * @opcode 0xe7
13273 * @opcodesub 11 mr/reg
13274 * @oppfx 0x66
13275 * @opunused immediate
13276 * @opcpuid sse
13277 * @optest ->
13278 */
13279 else
13280 IEMOP_RAISE_INVALID_OPCODE_RET();
13281}
13282
13283/* Opcode 0xf3 0x0f 0xe7 - invalid */
13284/* Opcode 0xf2 0x0f 0xe7 - invalid */
13285
13286
13287/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13288FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13289{
13290 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13291 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13292}
13293
13294
13295/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13296FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13297{
13298 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13299 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13300}
13301
13302
13303/* Opcode 0xf3 0x0f 0xe8 - invalid */
13304/* Opcode 0xf2 0x0f 0xe8 - invalid */
13305
13306/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13307FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13308{
13309 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13310 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13311}
13312
13313
13314/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13315FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13316{
13317 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13318 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13319}
13320
13321
13322/* Opcode 0xf3 0x0f 0xe9 - invalid */
13323/* Opcode 0xf2 0x0f 0xe9 - invalid */
13324
13325
13326/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13327FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13328{
13329 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13330 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13331}
13332
13333
13334/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13335FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13336{
13337 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13338 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13339}
13340
13341
13342/* Opcode 0xf3 0x0f 0xea - invalid */
13343/* Opcode 0xf2 0x0f 0xea - invalid */
13344
13345
13346/** Opcode 0x0f 0xeb - por Pq, Qq */
13347FNIEMOP_DEF(iemOp_por_Pq_Qq)
13348{
13349 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13350 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13351}
13352
13353
13354/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13355FNIEMOP_DEF(iemOp_por_Vx_Wx)
13356{
13357 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13358 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13359}
13360
13361
13362/* Opcode 0xf3 0x0f 0xeb - invalid */
13363/* Opcode 0xf2 0x0f 0xeb - invalid */
13364
13365/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13366FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13367{
13368 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13369 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13370}
13371
13372
13373/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13374FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13375{
13376 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13377 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13378}
13379
13380
13381/* Opcode 0xf3 0x0f 0xec - invalid */
13382/* Opcode 0xf2 0x0f 0xec - invalid */
13383
13384/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13385FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13386{
13387 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13388 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13389}
13390
13391
13392/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13393FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13394{
13395 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13396 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13397}
13398
13399
13400/* Opcode 0xf3 0x0f 0xed - invalid */
13401/* Opcode 0xf2 0x0f 0xed - invalid */
13402
13403
13404/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13405FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13406{
13407 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13408 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13409}
13410
13411
13412/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13413FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13414{
13415 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13416 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13417}
13418
13419
13420/* Opcode 0xf3 0x0f 0xee - invalid */
13421/* Opcode 0xf2 0x0f 0xee - invalid */
13422
13423
13424/** Opcode 0x0f 0xef - pxor Pq, Qq */
13425FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13426{
13427 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13428 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13429}
13430
13431
13432/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13433FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13434{
13435 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13436 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13437}
13438
13439
13440/* Opcode 0xf3 0x0f 0xef - invalid */
13441/* Opcode 0xf2 0x0f 0xef - invalid */
13442
13443/* Opcode 0x0f 0xf0 - invalid */
13444/* Opcode 0x66 0x0f 0xf0 - invalid */
13445
13446
13447/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13448FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13449{
13450 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13452 if (IEM_IS_MODRM_REG_MODE(bRm))
13453 {
13454 /*
13455 * Register, register - (not implemented, assuming it raises \#UD).
13456 */
13457 IEMOP_RAISE_INVALID_OPCODE_RET();
13458 }
13459 else
13460 {
13461 /*
13462 * Register, memory.
13463 */
13464 IEM_MC_BEGIN(0, 2);
13465 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13467
13468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13470 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13471 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13472 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13473 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13474
13475 IEM_MC_ADVANCE_RIP_AND_FINISH();
13476 IEM_MC_END();
13477 }
13478}
13479
13480
13481/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13482FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13483{
13484 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13485 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13486}
13487
13488
13489/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13490FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13491{
13492 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13493 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13494}
13495
13496
13497/* Opcode 0xf2 0x0f 0xf1 - invalid */
13498
13499/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13500FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13501{
13502 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13503 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13504}
13505
13506
13507/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13508FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13509{
13510 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13511 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13512}
13513
13514
13515/* Opcode 0xf2 0x0f 0xf2 - invalid */
13516
13517/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13518FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13519{
13520 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13521 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13522}
13523
13524
13525/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13526FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13527{
13528 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13529 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13530}
13531
13532/* Opcode 0xf2 0x0f 0xf3 - invalid */
13533
13534/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13535FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13536{
13537 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13538 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13539}
13540
13541
13542/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13543FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13544{
13545 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13546 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13547}
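
/* Note! pmuludq multiplies the low 32-bit element of each 64-bit lane into a
   full 64-bit product. Sketch for the 128-bit form (illustrative only):

       static void pmuludqU128Ref(RTUINT128U *puDst, PCRTUINT128U puSrc)
       {
           puDst->au64[0] = (uint64_t)puDst->au32[0] * puSrc->au32[0];
           puDst->au64[1] = (uint64_t)puDst->au32[2] * puSrc->au32[2];
       }

   The MMX form computes just the first of these two products. */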
13548
13549
13550/* Opcode 0xf2 0x0f 0xf4 - invalid */
13551
13552/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13553FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13554{
13555 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13556 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13557}
13558
13559
13560/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13561FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13562{
13563 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13564 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13565}
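
/* Note! pmaddwd multiplies the signed 16-bit lanes and adds adjacent pairs
   of the 32-bit products. Per-pair sketch (illustrative only):

       static int32_t pmaddwdPairRef(int16_t i1Lo, int16_t i2Lo, int16_t i1Hi, int16_t i2Hi)
       {
           // Sum in 64 bits; only the all -32768 input case exceeds int32_t.
           int64_t const i64 = (int64_t)i1Lo * i2Lo + (int64_t)i1Hi * i2Hi;
           return (int32_t)i64;  // truncation matches the hardware wrap
       }
*/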
13566
13567/* Opcode 0xf2 0x0f 0xf5 - invalid */
13568
13569/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13570FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13571{
13572 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13573 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13574}
13575
13576
13577/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13578FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13579{
13580 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13581 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13582}
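
/* Note! psadbw sums the absolute differences of the packed bytes; the 64-bit
   MMX form yields a single 16-bit sum zero-extended to 64 bits, the 128-bit
   form one such sum per 64-bit half. Reference sketch (illustrative only):

       static uint64_t psadbwU64Ref(uint64_t uSrc1, uint64_t uSrc2)
       {
           uint32_t uSum = 0;
           for (unsigned iByte = 0; iByte < 8; iByte++)
           {
               uint8_t const b1 = (uint8_t)(uSrc1 >> (iByte * 8));
               uint8_t const b2 = (uint8_t)(uSrc2 >> (iByte * 8));
               uSum += b1 >= b2 ? b1 - b2 : b2 - b1;  // |b1 - b2|
           }
           return uSum;  // at most 8 * 255 = 2040, fits the low word
       }
*/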
13583
13584
13585/* Opcode 0xf2 0x0f 0xf6 - invalid */
13586
13587/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13588FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13589/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13590FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}

/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
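    /* Intel CPUs consume a ModR/M byte (and any effective address bytes it
       implies) for ud0 before raising #UD, while other vendors raise it on
       the opcode alone; hence the vendor check below. */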
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
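    /* Note: IEMOP_X4() replicates a single handler across all four prefix
       columns, so every opcode byte always occupies four consecutive slots. */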
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
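/* Orientation sketch (not part of the decoder proper): with four prefix
 * columns per opcode byte, a lookup presumably boils down to something like
 *
 *     // bOpcode = second opcode byte; idxPrefix = 0 (none), 1 (0x66),
 *     // 2 (0xf3) or 3 (0xf2) -- names and encoding assumed for illustration.
 *     PFNIEMOP const pfnOp = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
 *
 * which is why the AssertCompile above insists on exactly 256 * 4 = 1024
 * entries. */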

/** @} */
