VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@ 100733

Last change on this file since 100733 was 100733, checked in by vboxsync, 21 months ago

VMM/IEM,ValKit: Shortened the IEMAllInstruction* file names to IEMAllInst*. This makes it easier to see the distinguishing bits of the name in the narrow buffer list widget here (this has been driving me nuts for months). Sorry for any conflicts this causes. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 503.4 KB
Line 
1/* $Id: IEMAllInstTwoByte0f.cpp.h 100733 2023-07-28 22:51:16Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Two byte opcodes (first byte 0x0f).
33 *
34 * @{
35 */
36
37
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker is also handed the FXSAVE state, unlike the
 * iemOpCommonMmxOpt_FullFull_To_Full variant which only gets the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx); /* no LOCK prefix; fMmx cpuid check */
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();       /* switch the FPU to MMX mode before touching MMX registers */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* The effective address is calculated before decoding is flagged as done,
           as IEM_MC_CALC_RM_EFF_ADDR consumes any remaining displacement bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
95
96
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2 below).
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx); /* no LOCK prefix; fMmx cpuid check */
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();       /* switch the FPU to MMX mode before touching MMX registers */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc); /* worker gets operands only, no FXSAVE state */
        IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
157
158
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Decodes like iemOpCommonMmx_FullFull_To_Full, but the cpuid check accepts
 * either SSE or the AMD MMX extensions (fSse / fAmdMmxExts).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts); /* either feature will do */
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();       /* switch the FPU to MMX mode before touching MMX registers */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
217
218
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function
 * takes no FXSAVE state, just the operands (IEM_MC_CALL_VOID_AIMPL_2 below).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts); /* either feature will do */
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();       /* switch the FPU to MMX mode before touching MMX registers */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc); /* worker gets operands only, no FXSAVE state */
        IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
280
281
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 *
 * Identical decode to iemOpCommonMmx_FullFull_To_Full except for the fSse2
 * cpuid check.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* no LOCK prefix; fSse2 cpuid check */
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();       /* switch the FPU to MMX mode before touching MMX registers */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
340
341
/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* no LOCK prefix; fSse cpuid check */
        IEM_MC_ARG(PRTUINT128U,  pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,             pDst, 0);
        IEM_MC_LOCAL(RTUINT128U,            uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
395
396
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* no LOCK prefix; fSse2 cpuid check */
        IEM_MC_ARG(PRTUINT128U,  pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,             pDst, 0);
        IEM_MC_LOCAL(RTUINT128U,            uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
450
451
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2 below).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* no LOCK prefix; fSse2 cpuid check */
        IEM_MC_ARG(PRTUINT128U,  pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); /* worker gets operands only, no FXSAVE state */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,             pDst, 0);
        IEM_MC_LOCAL(RTUINT128U,            uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
508
509
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx); /* no LOCK prefix; fMmx cpuid check */
        IEM_MC_ARG(uint64_t *,          puDst, 0);
        IEM_MC_ARG(uint64_t const *,    puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();       /* switch the FPU to MMX mode before touching MMX registers */

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst); /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  puDst, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* only 32 bits read, zero extended */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
568
569
/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* no LOCK prefix; fSse cpuid check */
        IEM_MC_ARG(PRTUINT128U,  puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,             puDst, 0);
        IEM_MC_LOCAL(RTUINT128U,            uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,  puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
628
629
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* no LOCK prefix; fSse2 cpuid check */
        IEM_MC_ARG(PRTUINT128U,  puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,             puDst, 0);
        IEM_MC_LOCAL(RTUINT128U,            uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,  puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
688
689
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx); /* no LOCK prefix; fMmx cpuid check */
        IEM_MC_ARG(uint64_t *,          puDst, 0);
        IEM_MC_ARG(uint64_t const *,    puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();       /* switch the FPU to MMX mode before touching MMX registers */

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst); /* mark the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  puDst, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
750
751
/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* no LOCK prefix; fSse cpuid check */
        IEM_MC_ARG(PRTUINT128U,  puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,             puDst, 0);
        IEM_MC_LOCAL(RTUINT128U,            uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,  puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
810
811
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* no LOCK prefix; fSse cpuid check */
        IEM_MC_LOCAL(IEMSSERESULT,              SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT,     pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG,                 pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG,                 pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); /* dst doubles as first source */
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        /* The result is stored before any pending SIMD FP exception is raised. */
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,              SseRes);
        IEM_MC_LOCAL(X86XMMREG,                 uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT,     pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG,                 pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG,       pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
874
875
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * The 2nd operand is a scalar single-precision value (low 32 bits of the
 * register, or a 32-bit memory read).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* no LOCK prefix; fSse cpuid check */
        IEM_MC_LOCAL(IEMSSERESULT,              SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT,     pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG,                 pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U,                pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); /* dst doubles as first source */
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));  /* only the low 32 bits of the source */
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        /* The result is stored before any pending SIMD FP exception is raised. */
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,              SseRes);
        IEM_MC_LOCAL(RTFLOAT32U,                r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT,     pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG,                 pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,      pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* 32-bit read, no 128-bit alignment check */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
938
939
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* no LOCK prefix; fSse2 cpuid check */
        IEM_MC_LOCAL(IEMSSERESULT,              SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT,     pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG,                 pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG,                 pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); /* dst doubles as first source */
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        /* The result is stored before any pending SIMD FP exception is raised. */
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,              SseRes);
        IEM_MC_LOCAL(X86XMMREG,                 uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT,     pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG,                 pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG,       pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first: IEM_MC_CALC_RM_EFF_ADDR consumes any
           remaining displacement bytes before decoding is flagged as done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1002
1003
1004/**
1005 * Common worker for SSE2 instructions on the forms:
1006 * pxxs xmm1, xmm2/mem64
1007 *
1008 * Proper alignment of the 128-bit operand is enforced.
1009 * Exceptions type 2. SSE2 cpuid checks.
1010 *
1011 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
1012 *
1013 * NOTE(review): the memory form below fetches a 64-bit scalar via
1014 *       IEM_MC_FETCH_MEM_R64 without an alignment check, so the
1015 *       "alignment is enforced" remark only applies to the register
1016 *       form / scalar ops (exception type 3) -- confirm against SDM.
1017 */
1013FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
1014{
1015 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1016 if (IEM_IS_MODRM_REG_MODE(bRm))
1017 {
1018 /*
1019 * XMM, XMM.
1020 */
1021 IEM_MC_BEGIN(3, 1);
1022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
1023 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
1024 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
1025 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
1026 IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
1027 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1028 IEM_MC_PREPARE_SSE_USAGE();
1029 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
    /* Only the low double of the source register is referenced. */
1030 IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1031 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
1032 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
1033 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1034
1035 IEM_MC_ADVANCE_RIP_AND_FINISH();
1036 IEM_MC_END();
1037 }
1038 else
1039 {
1040 /*
1041 * XMM, [mem64].
1042 */
1043 IEM_MC_BEGIN(3, 2);
1044 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
1045 IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
1046 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
1047 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
1048 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
1049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1050
1051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
1053 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1054 IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1055
1056 IEM_MC_PREPARE_SSE_USAGE();
1057 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
1058 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
1059 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
1060 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1061
1062 IEM_MC_ADVANCE_RIP_AND_FINISH();
1063 IEM_MC_END();
1064 }
1065}
1066
1067
1068/**
1069 * Common worker for SSE2 instructions on the form:
1070 * pxxxx xmm1, xmm2/mem128
1071 *
1072 * The 2nd operand is the second half of a register, which for SSE a 128-bit
1073 * aligned access where it may read the full 128 bits or only the upper 64 bits.
1074 *
1075 * Exceptions type 4.
1076 */
1077FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
1078{
1079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1080 if (IEM_IS_MODRM_REG_MODE(bRm))
1081 {
1082 /*
1083 * XMM, XMM.
1084 */
1085 IEM_MC_BEGIN(2, 0);
1086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
1087 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1088 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1089 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1090 IEM_MC_PREPARE_SSE_USAGE();
    /* Destination is read-write (non-const ref); the worker updates it in place. */
1091 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1092 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1093 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
1094 IEM_MC_ADVANCE_RIP_AND_FINISH();
1095 IEM_MC_END();
1096 }
1097 else
1098 {
1099 /*
1100 * XMM, [mem128].
1101 */
1102 IEM_MC_BEGIN(2, 2);
1103 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1104 IEM_MC_LOCAL(RTUINT128U, uSrc);
1105 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1107
1108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
1110 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1111 /** @todo Most CPUs probably only read the high qword. We read everything to
1112 * make sure we apply segmentation and alignment checks correctly.
1113 * When we have time, it would be interesting to explore what real
1114 * CPUs actually does and whether it will do a TLB load for the lower
1115 * part or skip any associated \#PF. */
1116 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1117
1118 IEM_MC_PREPARE_SSE_USAGE();
1119 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1120 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
1121
1122 IEM_MC_ADVANCE_RIP_AND_FINISH();
1123 IEM_MC_END();
1124 }
1125}
1126
1127
1128/**
1129 * Common worker for SSE3 instructions on the forms:
1130 * hxxx xmm1, xmm2/mem128
1131 *
1132 * Proper alignment of the 128-bit operand is enforced.
1133 * Exceptions type 2. SSE3 cpuid checks.
1134 *
1135 * Identical structure to iemOpCommonSse2Fp_FullFull_To_Full, only the cpuid
1136 * feature flag checked (fSse3) differs.
1137 *
1138 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
1139 */
1137FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
1138{
1139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1140 if (IEM_IS_MODRM_REG_MODE(bRm))
1141 {
1142 /*
1143 * XMM, XMM.
1144 */
1145 IEM_MC_BEGIN(3, 1);
1146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
1147 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
1148 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
1149 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
1150 IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
1151 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1152 IEM_MC_PREPARE_SSE_USAGE();
1153 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
1154 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1155 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
1156 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
1157 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1158
1159 IEM_MC_ADVANCE_RIP_AND_FINISH();
1160 IEM_MC_END();
1161 }
1162 else
1163 {
1164 /*
1165 * XMM, [mem128].
1166 */
1167 IEM_MC_BEGIN(3, 2);
1168 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
1169 IEM_MC_LOCAL(X86XMMREG, uSrc2);
1170 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
1171 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
1172 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
1173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1174
1175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
1177 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1178 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1179
1180 IEM_MC_PREPARE_SSE_USAGE();
1181 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
1182 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
1183 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
1184 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1185
1186 IEM_MC_ADVANCE_RIP_AND_FINISH();
1187 IEM_MC_END();
1188 }
1189}
1190
1191
1192/** Opcode 0x0f 0x00 /0. Store LDTR to register or memory. */
1193FNIEMOPRM_DEF(iemOp_Grp6_sldt)
1194{
1195 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
1196 IEMOP_HLP_MIN_286();
1197 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1198
1199 if (IEM_IS_MODRM_REG_MODE(bRm))
1200 {
    /* Register destination: width depends on the effective operand size, so pass it along. */
1201 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1202 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1203 }
1204
1205 /* Ignore operand size here, memory refs are always 16-bit. */
1206 IEM_MC_BEGIN(2, 0);
1207 IEM_MC_ARG(uint16_t, iEffSeg, 0);
1208 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1210 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1211 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1212 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
1213 IEM_MC_END();
1214}
1215
1216
1217/** Opcode 0x0f 0x00 /1. Store TR to register or memory; mirrors iemOp_Grp6_sldt. */
1218FNIEMOPRM_DEF(iemOp_Grp6_str)
1219{
1220 IEMOP_MNEMONIC(str, "str Rv/Mw");
1221 IEMOP_HLP_MIN_286();
1222 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1223
1224
1225 if (IEM_IS_MODRM_REG_MODE(bRm))
1226 {
1227 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1228 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1229 }
1230
1231 /* Ignore operand size here, memory refs are always 16-bit. */
1232 IEM_MC_BEGIN(2, 0);
1233 IEM_MC_ARG(uint16_t, iEffSeg, 0);
1234 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1236 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1237 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1238 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
1239 IEM_MC_END();
1240}
1241
1242
1243/** Opcode 0x0f 0x00 /2. Load LDTR from a 16-bit selector in register or memory. */
1244FNIEMOPRM_DEF(iemOp_Grp6_lldt)
1245{
1246 IEMOP_MNEMONIC(lldt, "lldt Ew");
1247 IEMOP_HLP_MIN_286();
1248 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1249
1250 if (IEM_IS_MODRM_REG_MODE(bRm))
1251 {
1252 IEM_MC_BEGIN(1, 0);
1253 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
1254 IEM_MC_ARG(uint16_t, u16Sel, 0);
1255 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1256 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
1257 IEM_MC_END();
1258 }
1259 else
1260 {
1261 IEM_MC_BEGIN(1, 1);
1262 IEM_MC_ARG(uint16_t, u16Sel, 0);
1263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1265 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
    /* CPL check is done here for the memory form (before the fetch); the register
       form leaves all privilege checking to iemCImpl_lldt. */
1266 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
1267 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1268 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
1269 IEM_MC_END();
1270 }
1271}
1272
1273
1274/** Opcode 0x0f 0x00 /3. Load TR from a 16-bit selector in register or memory. */
1275FNIEMOPRM_DEF(iemOp_Grp6_ltr)
1276{
1277 IEMOP_MNEMONIC(ltr, "ltr Ew");
1278 IEMOP_HLP_MIN_286();
1279 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1280
1281 if (IEM_IS_MODRM_REG_MODE(bRm))
1282 {
1283 IEM_MC_BEGIN(1, 0);
1284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1285 IEM_MC_ARG(uint16_t, u16Sel, 0);
1286 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1287 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
1288 IEM_MC_END();
1289 }
1290 else
1291 {
1292 IEM_MC_BEGIN(1, 1);
1293 IEM_MC_ARG(uint16_t, u16Sel, 0);
1294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Memory form checks CPL==0 before fetching the selector, same as lldt. */
1297 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
1298 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1299 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
1300 IEM_MC_END();
1301 }
1302}
1303
1304
1305/** Opcode 0x0f 0x00 /3. */
1306FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
1307{
1308 IEMOP_HLP_MIN_286();
1309 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1310
1311 if (IEM_IS_MODRM_REG_MODE(bRm))
1312 {
1313 IEM_MC_BEGIN(2, 0);
1314 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1315 IEM_MC_ARG(uint16_t, u16Sel, 0);
1316 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1317 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1318 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
1319 IEM_MC_END();
1320 }
1321 else
1322 {
1323 IEM_MC_BEGIN(2, 1);
1324 IEM_MC_ARG(uint16_t, u16Sel, 0);
1325 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1328 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1329 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1330 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
1331 IEM_MC_END();
1332 }
1333}
1334
1335
1336/** Opcode 0x0f 0x00 /4. Thin wrapper: VERR = common VerX worker with fWrite=false. */
1337FNIEMOPRM_DEF(iemOp_Grp6_verr)
1338{
1339 IEMOP_MNEMONIC(verr, "verr Ew");
1340 IEMOP_HLP_MIN_286();
1341 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
1342}
1343
1344
1345/** Opcode 0x0f 0x00 /5. Thin wrapper: VERW = common VerX worker with fWrite=true. */
1346FNIEMOPRM_DEF(iemOp_Grp6_verw)
1347{
1348 IEMOP_MNEMONIC(verw, "verw Ew");
1349 IEMOP_HLP_MIN_286();
1350 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
1351}
1352
1353
1354/**
1355 * Group 6 jump table, indexed by the ModR/M reg field (see iemOp_Grp6).
1356 */
1357IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
1358{
1359 iemOp_Grp6_sldt,      /* /0 */
1360 iemOp_Grp6_str,       /* /1 */
1361 iemOp_Grp6_lldt,      /* /2 */
1362 iemOp_Grp6_ltr,       /* /3 */
1363 iemOp_Grp6_verr,      /* /4 */
1364 iemOp_Grp6_verw,      /* /5 */
1365 iemOp_InvalidWithRM,  /* /6 */
1366 iemOp_InvalidWithRM   /* /7 */
1367};
1368
1369/** Opcode 0x0f 0x00. Fetches ModR/M and dispatches on its reg field via g_apfnGroup6. */
1370FNIEMOP_DEF(iemOp_Grp6)
1371{
1372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1373 return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
1374}
1375
1376
1377/** Opcode 0x0f 0x01 /0, memory form only (see g_apfnGroup7Mem). Store GDTR. */
1378FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
1379{
1380 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
1381 IEMOP_HLP_MIN_286();
1382 IEMOP_HLP_64BIT_OP_SIZE();
1383 IEM_MC_BEGIN(2, 1);
1384 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1385 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1389 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
1390 IEM_MC_END();
1391}
1392
1393
1394/** Opcode 0x0f 0x01 /0 (modrm 0xc1). VMX hypercall entry point. */
1395FNIEMOP_DEF(iemOp_Grp7_vmcall)
1396{
1397 IEMOP_MNEMONIC(vmcall, "vmcall");
1398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
1399
1400 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1401 want all hypercalls regardless of instruction used, and if a
1402 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1403 (NEM/win makes ASSUMPTIONS about this behavior.) */
1404 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_vmcall);
1405}
1406
1407
1408/** Opcode 0x0f 0x01 /0 (modrm 0xc2). VM entry; flagged as far indirect branch ending the TB. */
1409#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1410FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1411{
1412 IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
1413 IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
1414 IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
1415 IEMOP_HLP_DONE_DECODING();
1416 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1417 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
1418 iemCImpl_vmlaunch);
1419}
1420#else
/* Without nested VMX support compiled in, the instruction raises #UD. */
1421FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1422{
1423 IEMOP_BITCH_ABOUT_STUB();
1424 IEMOP_RAISE_INVALID_OPCODE_RET();
1425}
1426#endif
1427
1428
1429/** Opcode 0x0f 0x01 /0 (modrm 0xc3). VM entry (resume); same CIMPL flags as vmlaunch. */
1430#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1431FNIEMOP_DEF(iemOp_Grp7_vmresume)
1432{
1433 IEMOP_MNEMONIC(vmresume, "vmresume");
1434 IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
1435 IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
1436 IEMOP_HLP_DONE_DECODING();
1437 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1438 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
1439 iemCImpl_vmresume);
1440}
1441#else
/* Without nested VMX support compiled in, the instruction raises #UD. */
1442FNIEMOP_DEF(iemOp_Grp7_vmresume)
1443{
1444 IEMOP_BITCH_ABOUT_STUB();
1445 IEMOP_RAISE_INVALID_OPCODE_RET();
1446}
1447#endif
1448
1449
1450/** Opcode 0x0f 0x01 /0 (modrm 0xc4). Leave VMX operation. */
1451#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1452FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1453{
1454 IEMOP_MNEMONIC(vmxoff, "vmxoff");
1455 IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
1456 IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
1457 IEMOP_HLP_DONE_DECODING();
1458 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmxoff);
1459}
1460#else
/* Without nested VMX support compiled in, the instruction raises #UD. */
1461FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1462{
1463 IEMOP_BITCH_ABOUT_STUB();
1464 IEMOP_RAISE_INVALID_OPCODE_RET();
1465}
1466#endif
1467
1468
1469/** Opcode 0x0f 0x01 /1, memory form only. Store IDTR; mirrors iemOp_Grp7_sgdt. */
1470FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
1471{
1472 IEMOP_MNEMONIC(sidt, "sidt Ms");
1473 IEMOP_HLP_MIN_286();
1474 IEMOP_HLP_64BIT_OP_SIZE();
1475 IEM_MC_BEGIN(2, 1);
1476 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1477 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1480 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1481 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
1482 IEM_MC_END();
1483}
1484
1485
1486/** Opcode 0x0f 0x01 /1 (modrm 0xc8). Passes the effective segment for the RAX-based address. */
1487FNIEMOP_DEF(iemOp_Grp7_monitor)
1488{
1489 IEMOP_MNEMONIC(monitor, "monitor");
1490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
1491 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
1492}
1493
1494
1495/** Opcode 0x0f 0x01 /1 (modrm 0xc9). May halt the vCPU, so the translation block ends here. */
1496FNIEMOP_DEF(iemOp_Grp7_mwait)
1497{
1498 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
1499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1500 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_mwait);
1501}
1502
1503
1504/** Opcode 0x0f 0x01 /2, memory form only. Load GDTR; the C impl needs the
1505 * effective operand size to know how many base address bytes to use. */
1506FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
1507{
1508 IEMOP_MNEMONIC(lgdt, "lgdt");
1509 IEMOP_HLP_64BIT_OP_SIZE();
1510 IEM_MC_BEGIN(3, 1);
1511 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1512 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1513 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
1514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1516 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1517 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1518 IEM_MC_END();
1519}
1519
1520
1521/** Opcode 0x0f 0x01 0xd0. Read an XCR; #UD unless the guest CPU reports XSAVE. */
1522FNIEMOP_DEF(iemOp_Grp7_xgetbv)
1523{
1524 IEMOP_MNEMONIC(xgetbv, "xgetbv");
1525 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1526 {
1527 /** @todo r=ramshankar: We should use
1528 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
1529 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
1530/** @todo testcase: test prefixes and exceptions. currently not checking for the
1531 * OPSIZE one ... */
1532 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1533 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_xgetbv);
1534 }
1535 IEMOP_RAISE_INVALID_OPCODE_RET();
1536}
1537
1538
1539/** Opcode 0x0f 0x01 0xd1. Write an XCR; #UD unless the guest CPU reports XSAVE.
1540 * Unlike xgetbv this can cause a VM-exit, hence the IEM_CIMPL_F_VMEXIT flag. */
1541FNIEMOP_DEF(iemOp_Grp7_xsetbv)
1542{
1543 IEMOP_MNEMONIC(xsetbv, "xsetbv");
1544 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1545 {
1546 /** @todo r=ramshankar: We should use
1547 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
1548 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
1549/** @todo testcase: test prefixes and exceptions. currently not checking for the
1550 * OPSIZE one ... */
1551 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1552 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_xsetbv);
1553 }
1554 IEMOP_RAISE_INVALID_OPCODE_RET();
1555}
1555
1556
1557/** Opcode 0x0f 0x01 /3, memory form only. Load IDTR; in 64-bit code the
1558 * operand size is forced to 64-bit rather than using IEMOP_HLP_64BIT_OP_SIZE. */
1559FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
1560{
1561 IEMOP_MNEMONIC(lidt, "lidt");
1562 IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
1563 IEM_MC_BEGIN(3, 1);
1564 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1565 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1566 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
1567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1569 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1570 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1571 IEM_MC_END();
1572}
1572
1573
1574/** Opcode 0x0f 0x01 0xd8. SVM world switch; flagged as a far indirect branch ending the TB. */
1575#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1576FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
1577{
1578 IEMOP_MNEMONIC(vmrun, "vmrun");
1579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1580 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1581 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
1582 iemCImpl_vmrun);
1583}
1584#else
/* Without nested SVM support compiled in, vmrun raises #UD via the stub. */
1585FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1586#endif
1587
1588/** Opcode 0x0f 0x01 0xd9. AMD hypercall entry point (always decoded, see note below). */
1589FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
1590{
1591 IEMOP_MNEMONIC(vmmcall, "vmmcall");
1592 /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
1593 * opcode sequence when F3 or F2 is used as prefix. So, the assumtion
1594 * here cannot be right... */
1595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1596
1597 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1598 want all hypercalls regardless of instruction used, and if a
1599 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1600 (NEM/win makes ASSUMPTIONS about this behavior.) */
1601 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmmcall);
1602}
1603
1604/** Opcode 0x0f 0x01 0xda. SVM: load guest state from VMCB. */
1605#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1606FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1607{
1608 IEMOP_MNEMONIC(vmload, "vmload");
1609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1610 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmload);
1611}
1612#else
/* Without nested SVM support compiled in, vmload raises #UD via the stub. */
1613FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1614#endif
1615
1616
1617/** Opcode 0x0f 0x01 0xdb. SVM: save guest state to VMCB. */
1618#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1619FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
1620{
1621 IEMOP_MNEMONIC(vmsave, "vmsave");
1622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1623 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmsave);
1624}
1625#else
/* Without nested SVM support compiled in, vmsave raises #UD via the stub. */
1626FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1627#endif
1628
1629
1630/** Opcode 0x0f 0x01 0xdc. SVM: set the global interrupt flag. */
1631#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1632FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
1633{
1634 IEMOP_MNEMONIC(stgi, "stgi");
1635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1636 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_stgi);
1637}
1638#else
/* Without nested SVM support compiled in, stgi raises #UD via the stub. */
1639FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1640#endif
1641
1642
1643/** Opcode 0x0f 0x01 0xdd. SVM: clear the global interrupt flag. */
1644#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1645FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
1646{
1647 IEMOP_MNEMONIC(clgi, "clgi");
1648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1649 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clgi);
1650}
1651#else
/* Without nested SVM support compiled in, clgi raises #UD via the stub. */
1652FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1653#endif
1654
1655
1656/** Opcode 0x0f 0x01 0xdf. SVM: invalidate TLB entry for a guest address/ASID. */
1657#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1658FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
1659{
1660 IEMOP_MNEMONIC(invlpga, "invlpga");
1661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1662 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpga);
1663}
1664#else
/* Without nested SVM support compiled in, invlpga raises #UD via the stub. */
1665FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1666#endif
1667
1668
1669/** Opcode 0x0f 0x01 0xde. SVM: secure init and jump. */
1670#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1671FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
1672{
1673 IEMOP_MNEMONIC(skinit, "skinit");
1674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1675 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_skinit);
1676}
1677#else
/* Without nested SVM support compiled in, skinit raises #UD via the stub. */
1678FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1679#endif
1680
1681
1682/** Opcode 0x0f 0x01 /4. Store the machine status word (low CR0 bits). */
1683FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1684{
1685 IEMOP_MNEMONIC(smsw, "smsw");
1686 IEMOP_HLP_MIN_286();
1687 if (IEM_IS_MODRM_REG_MODE(bRm))
1688 {
    /* Register destination: width depends on the effective operand size. */
1689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1690 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1691 }
1692
1693 /* Ignore operand size here, memory refs are always 16-bit. */
1694 IEM_MC_BEGIN(2, 0);
1695 IEM_MC_ARG(uint16_t, iEffSeg, 0);
1696 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1699 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1700 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1701 IEM_MC_END();
1702}
1703
1704
1705/** Opcode 0x0f 0x01 /6. Load the machine status word into CR0 (may change CPU mode). */
1706FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1707{
1708 /* The operand size is effectively ignored, all is 16-bit and only the
1709 lower 3-bits are used. */
1710 IEMOP_MNEMONIC(lmsw, "lmsw");
1711 IEMOP_HLP_MIN_286();
1712 if (IEM_IS_MODRM_REG_MODE(bRm))
1713 {
1714 IEM_MC_BEGIN(2, 0);
1715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1716 IEM_MC_ARG(uint16_t, u16Tmp, 0);
    /* No memory operand in this form; pass NIL so iemCImpl_lmsw can tell the forms apart. */
1717 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1718 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1719 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1720 IEM_MC_END();
1721 }
1722 else
1723 {
1724 IEM_MC_BEGIN(2, 0);
1725 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1726 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1729 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1730 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1731 IEM_MC_END();
1732 }
1733}
1734
1735
1736/** Opcode 0x0f 0x01 /7, memory form only. Invalidate the TLB entry for [mem]. */
1737FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1738{
1739 IEMOP_MNEMONIC(invlpg, "invlpg");
1740 IEMOP_HLP_MIN_486();
1741 IEM_MC_BEGIN(1, 1);
1742 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Only the effective address matters; no memory access is performed by the decoder. */
1745 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
1746 IEM_MC_END();
1747}
1748
1749
1750/** Opcode 0x0f 0x01 0xf8. 64-bit mode only; swaps GS base with MSR_KERNEL_GS_BASE. */
1751FNIEMOP_DEF(iemOp_Grp7_swapgs)
1752{
1753 IEMOP_MNEMONIC(swapgs, "swapgs");
1754 IEMOP_HLP_ONLY_64BIT();
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_swapgs);
1757}
1758
1759
1760/** Opcode 0x0f 0x01 0xf9. Like rdtsc but also returns TSC_AUX; can VM-exit. */
1761FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1762{
1763 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1765 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtscp);
1766}
1767
1768
1769/**
1770 * Group 7 jump table, memory variant (mod != 3), indexed by the ModR/M reg
1771 * field. Register forms are handled by the switch in iemOp_Grp7 instead.
1772 */
1772IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1773{
1774 iemOp_Grp7_sgdt,       /* /0 */
1775 iemOp_Grp7_sidt,       /* /1 */
1776 iemOp_Grp7_lgdt,       /* /2 */
1777 iemOp_Grp7_lidt,       /* /3 */
1778 iemOp_Grp7_smsw,       /* /4 */
1779 iemOp_InvalidWithRM,   /* /5 */
1780 iemOp_Grp7_lmsw,       /* /6 */
1781 iemOp_Grp7_invlpg      /* /7 */
1782};
1783
1784
1785/** Opcode 0x0f 0x01. Memory forms dispatch via g_apfnGroup7Mem; register forms
1786 * (mod == 3) encode individual instructions in the reg+rm fields below. */
1787FNIEMOP_DEF(iemOp_Grp7)
1788{
1789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1790 if (IEM_IS_MODRM_MEM_MODE(bRm))
1791 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1792
1793 switch (IEM_GET_MODRM_REG_8(bRm))
1794 {
    /* /0 register forms: VMX instructions (0xc1..0xc4). */
1795 case 0:
1796 switch (IEM_GET_MODRM_RM_8(bRm))
1797 {
1798 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1799 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1800 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1801 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1802 }
1803 IEMOP_RAISE_INVALID_OPCODE_RET();
1804
    /* /1 register forms: monitor/mwait (0xc8/0xc9). */
1805 case 1:
1806 switch (IEM_GET_MODRM_RM_8(bRm))
1807 {
1808 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1809 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1810 }
1811 IEMOP_RAISE_INVALID_OPCODE_RET();
1812
    /* /2 register forms: xgetbv/xsetbv (0xd0/0xd1). */
1813 case 2:
1814 switch (IEM_GET_MODRM_RM_8(bRm))
1815 {
1816 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1817 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1818 }
1819 IEMOP_RAISE_INVALID_OPCODE_RET();
1820
    /* /3 register forms: AMD SVM instructions (0xd8..0xdf), all rm values covered. */
1821 case 3:
1822 switch (IEM_GET_MODRM_RM_8(bRm))
1823 {
1824 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1825 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1826 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1827 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1828 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1829 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1830 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1831 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1833 }
1834
    /* /4 and /6 have register forms handled by the same workers as the memory forms. */
1835 case 4:
1836 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1837
1838 case 5:
1839 IEMOP_RAISE_INVALID_OPCODE_RET();
1840
1841 case 6:
1842 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1843
    /* /7 register forms: swapgs/rdtscp (0xf8/0xf9). */
1844 case 7:
1845 switch (IEM_GET_MODRM_RM_8(bRm))
1846 {
1847 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1848 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1849 }
1850 IEMOP_RAISE_INVALID_OPCODE_RET();
1851
1852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1853 }
1854}
1854
1855/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03).
1856 * Note: the original comment claimed "Opcode 0x0f 0x00 /3", which belongs to ltr.
1857 * The 32-bit and 64-bit operand sizes share the 64-bit register path below. */
1856FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1857{
1858 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1860
1861 if (IEM_IS_MODRM_REG_MODE(bRm))
1862 {
1863 switch (pVCpu->iem.s.enmEffOpSize)
1864 {
1865 case IEMMODE_16BIT:
1866 {
1867 IEM_MC_BEGIN(3, 0);
1868 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1869 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1870 IEM_MC_ARG(uint16_t, u16Sel, 1);
1871 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1872
1873 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1874 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1875 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1876
    /* No break needed: IEM_MC_END ends the micro-op block (functions here return via it). */
1877 IEM_MC_END();
1878 }
1879
1880 case IEMMODE_32BIT:
1881 case IEMMODE_64BIT:
1882 {
1883 IEM_MC_BEGIN(3, 0);
1884 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1885 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1886 IEM_MC_ARG(uint16_t, u16Sel, 1);
1887 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1888
1889 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1890 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1891 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1892
1893 IEM_MC_END();
1894 }
1895
1896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1897 }
1898 }
1899 else
1900 {
1901 switch (pVCpu->iem.s.enmEffOpSize)
1902 {
1903 case IEMMODE_16BIT:
1904 {
1905 IEM_MC_BEGIN(3, 1);
1906 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1907 IEM_MC_ARG(uint16_t, u16Sel, 1);
1908 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1910
1911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1912 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1913
1914 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1915 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1916 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1917
1918 IEM_MC_END();
1919 }
1920
1921 case IEMMODE_32BIT:
1922 case IEMMODE_64BIT:
1923 {
1924 IEM_MC_BEGIN(3, 1);
1925 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1926 IEM_MC_ARG(uint16_t, u16Sel, 1);
1927 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1929
1930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1931 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1932/** @todo testcase: make sure it's a 16-bit read. */
1933
1934 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1935 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1936 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1937
1938 IEM_MC_END();
1939 }
1940
1941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1942 }
1943 }
1944}
1945
1946
1947
/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    /* Shares a worker with LSL; 'true' selects the LAR (access-rights) variant. */
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1954
1955
/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    /* Shares a worker with LAR; 'false' selects the LSL (segment-limit) variant. */
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1962
1963
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Fully deferred to the C implementation.  The flags tell the recompiler this
       is a far indirect branch that changes mode and RFLAGS and ends the TB. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
                                iemCImpl_syscall);
}
1973
1974
/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; flagged IEM_CIMPL_F_VMEXIT since it may trigger a VM-exit. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clts);
}
1982
1983
/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The effective operand size is passed along, as the C implementation needs it.
       Same branch/mode/flags treatment as SYSCALL: far indirect branch, ends the TB. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
                                iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
}
1993
1994
/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();    /* #UD on pre-486 CPU profiles. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; may trigger a VM-exit. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invd);
}
2003
2004
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();    /* #UD on pre-486 CPU profiles. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; may trigger a VM-exit. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wbinvd);
}
2013
2014
/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    /* UD2's defined behavior is to raise \#UD, so that is all we do. */
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2021
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    /* Only accepted when the CPU profile has long mode or the 3DNowPrefetch feature. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Register operands are invalid for this group; memory operands only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* The /r selector only affects the mnemonic here; all variants behave alike below. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address for its side effects (faulting encodings), then do nothing. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2061
2062
/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");

    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Switches the FPU out of MMX mode; no other state is touched here. */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2077
2078
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* \#UD unless the guest CPU profile advertises 3DNow!. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Fetch the next opcode byte and let the dispatcher select the actual instruction. */
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2097
2098
2099/**
2100 * @opcode 0x10
2101 * @oppfx none
2102 * @opcpuid sse
2103 * @opgroup og_sse_simdfp_datamove
2104 * @opxcpttype 4UA
2105 * @optest op1=1 op2=2 -> op1=2
2106 * @optest op1=0 op2=-22 -> op1=-22
2107 */
2108FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2109{
2110 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2112 if (IEM_IS_MODRM_REG_MODE(bRm))
2113 {
2114 /*
2115 * XMM128, XMM128.
2116 */
2117 IEM_MC_BEGIN(0, 0);
2118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2119 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2120 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2121 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2122 IEM_GET_MODRM_RM(pVCpu, bRm));
2123 IEM_MC_ADVANCE_RIP_AND_FINISH();
2124 IEM_MC_END();
2125 }
2126 else
2127 {
2128 /*
2129 * XMM128, [mem128].
2130 */
2131 IEM_MC_BEGIN(0, 2);
2132 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134
2135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2137 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2138 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2139
2140 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2141 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2142
2143 IEM_MC_ADVANCE_RIP_AND_FINISH();
2144 IEM_MC_END();
2145 }
2146
2147}
2148
2149
2150/**
2151 * @opcode 0x10
2152 * @oppfx 0x66
2153 * @opcpuid sse2
2154 * @opgroup og_sse2_pcksclr_datamove
2155 * @opxcpttype 4UA
2156 * @optest op1=1 op2=2 -> op1=2
2157 * @optest op1=0 op2=-42 -> op1=-42
2158 */
2159FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2160{
2161 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2163 if (IEM_IS_MODRM_REG_MODE(bRm))
2164 {
2165 /*
2166 * XMM128, XMM128.
2167 */
2168 IEM_MC_BEGIN(0, 0);
2169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2170 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2172 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2173 IEM_GET_MODRM_RM(pVCpu, bRm));
2174 IEM_MC_ADVANCE_RIP_AND_FINISH();
2175 IEM_MC_END();
2176 }
2177 else
2178 {
2179 /*
2180 * XMM128, [mem128].
2181 */
2182 IEM_MC_BEGIN(0, 2);
2183 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2185
2186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2188 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2189 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2190
2191 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2192 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2193
2194 IEM_MC_ADVANCE_RIP_AND_FINISH();
2195 IEM_MC_END();
2196 }
2197}
2198
2199
2200/**
2201 * @opcode 0x10
2202 * @oppfx 0xf3
2203 * @opcpuid sse
2204 * @opgroup og_sse_simdfp_datamove
2205 * @opxcpttype 5
2206 * @optest op1=1 op2=2 -> op1=2
2207 * @optest op1=0 op2=-22 -> op1=-22
2208 */
2209FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2210{
2211 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 if (IEM_IS_MODRM_REG_MODE(bRm))
2214 {
2215 /*
2216 * XMM32, XMM32.
2217 */
2218 IEM_MC_BEGIN(0, 1);
2219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2220 IEM_MC_LOCAL(uint32_t, uSrc);
2221
2222 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2223 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2224 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2225 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2226
2227 IEM_MC_ADVANCE_RIP_AND_FINISH();
2228 IEM_MC_END();
2229 }
2230 else
2231 {
2232 /*
2233 * XMM128, [mem32].
2234 */
2235 IEM_MC_BEGIN(0, 2);
2236 IEM_MC_LOCAL(uint32_t, uSrc);
2237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2238
2239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2243
2244 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2245 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2246
2247 IEM_MC_ADVANCE_RIP_AND_FINISH();
2248 IEM_MC_END();
2249 }
2250}
2251
2252
2253/**
2254 * @opcode 0x10
2255 * @oppfx 0xf2
2256 * @opcpuid sse2
2257 * @opgroup og_sse2_pcksclr_datamove
2258 * @opxcpttype 5
2259 * @optest op1=1 op2=2 -> op1=2
2260 * @optest op1=0 op2=-42 -> op1=-42
2261 */
2262FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2263{
2264 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2266 if (IEM_IS_MODRM_REG_MODE(bRm))
2267 {
2268 /*
2269 * XMM64, XMM64.
2270 */
2271 IEM_MC_BEGIN(0, 1);
2272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2273 IEM_MC_LOCAL(uint64_t, uSrc);
2274
2275 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2276 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2277 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2278 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2279
2280 IEM_MC_ADVANCE_RIP_AND_FINISH();
2281 IEM_MC_END();
2282 }
2283 else
2284 {
2285 /*
2286 * XMM128, [mem64].
2287 */
2288 IEM_MC_BEGIN(0, 2);
2289 IEM_MC_LOCAL(uint64_t, uSrc);
2290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2291
2292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2294 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2295 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2296
2297 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2298 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2299
2300 IEM_MC_ADVANCE_RIP_AND_FINISH();
2301 IEM_MC_END();
2302 }
2303}
2304
2305
2306/**
2307 * @opcode 0x11
2308 * @oppfx none
2309 * @opcpuid sse
2310 * @opgroup og_sse_simdfp_datamove
2311 * @opxcpttype 4UA
2312 * @optest op1=1 op2=2 -> op1=2
2313 * @optest op1=0 op2=-42 -> op1=-42
2314 */
2315FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2316{
2317 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2319 if (IEM_IS_MODRM_REG_MODE(bRm))
2320 {
2321 /*
2322 * XMM128, XMM128.
2323 */
2324 IEM_MC_BEGIN(0, 0);
2325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2326 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2327 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2328 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2329 IEM_GET_MODRM_REG(pVCpu, bRm));
2330 IEM_MC_ADVANCE_RIP_AND_FINISH();
2331 IEM_MC_END();
2332 }
2333 else
2334 {
2335 /*
2336 * [mem128], XMM128.
2337 */
2338 IEM_MC_BEGIN(0, 2);
2339 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2341
2342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2344 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2345 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2346
2347 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2348 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2349
2350 IEM_MC_ADVANCE_RIP_AND_FINISH();
2351 IEM_MC_END();
2352 }
2353}
2354
2355
2356/**
2357 * @opcode 0x11
2358 * @oppfx 0x66
2359 * @opcpuid sse2
2360 * @opgroup og_sse2_pcksclr_datamove
2361 * @opxcpttype 4UA
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-42 -> op1=-42
2364 */
2365FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2366{
2367 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2369 if (IEM_IS_MODRM_REG_MODE(bRm))
2370 {
2371 /*
2372 * XMM128, XMM128.
2373 */
2374 IEM_MC_BEGIN(0, 0);
2375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2376 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2377 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2378 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2379 IEM_GET_MODRM_REG(pVCpu, bRm));
2380 IEM_MC_ADVANCE_RIP_AND_FINISH();
2381 IEM_MC_END();
2382 }
2383 else
2384 {
2385 /*
2386 * [mem128], XMM128.
2387 */
2388 IEM_MC_BEGIN(0, 2);
2389 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2391
2392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2394 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2396
2397 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2398 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2399
2400 IEM_MC_ADVANCE_RIP_AND_FINISH();
2401 IEM_MC_END();
2402 }
2403}
2404
2405
2406/**
2407 * @opcode 0x11
2408 * @oppfx 0xf3
2409 * @opcpuid sse
2410 * @opgroup og_sse_simdfp_datamove
2411 * @opxcpttype 5
2412 * @optest op1=1 op2=2 -> op1=2
2413 * @optest op1=0 op2=-22 -> op1=-22
2414 */
2415FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2416{
2417 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2419 if (IEM_IS_MODRM_REG_MODE(bRm))
2420 {
2421 /*
2422 * XMM32, XMM32.
2423 */
2424 IEM_MC_BEGIN(0, 1);
2425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427
2428 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2429 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2430 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2431 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2432
2433 IEM_MC_ADVANCE_RIP_AND_FINISH();
2434 IEM_MC_END();
2435 }
2436 else
2437 {
2438 /*
2439 * [mem32], XMM32.
2440 */
2441 IEM_MC_BEGIN(0, 2);
2442 IEM_MC_LOCAL(uint32_t, uSrc);
2443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2444
2445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2447 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2449
2450 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2451 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2452
2453 IEM_MC_ADVANCE_RIP_AND_FINISH();
2454 IEM_MC_END();
2455 }
2456}
2457
2458
2459/**
2460 * @opcode 0x11
2461 * @oppfx 0xf2
2462 * @opcpuid sse2
2463 * @opgroup og_sse2_pcksclr_datamove
2464 * @opxcpttype 5
2465 * @optest op1=1 op2=2 -> op1=2
2466 * @optest op1=0 op2=-42 -> op1=-42
2467 */
2468FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2469{
2470 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2472 if (IEM_IS_MODRM_REG_MODE(bRm))
2473 {
2474 /*
2475 * XMM64, XMM64.
2476 */
2477 IEM_MC_BEGIN(0, 1);
2478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2479 IEM_MC_LOCAL(uint64_t, uSrc);
2480
2481 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2483 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2484 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2485
2486 IEM_MC_ADVANCE_RIP_AND_FINISH();
2487 IEM_MC_END();
2488 }
2489 else
2490 {
2491 /*
2492 * [mem64], XMM64.
2493 */
2494 IEM_MC_BEGIN(0, 2);
2495 IEM_MC_LOCAL(uint64_t, uSrc);
2496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2497
2498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2500 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2502
2503 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2504 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2505
2506 IEM_MC_ADVANCE_RIP_AND_FINISH();
2507 IEM_MC_END();
2508 }
2509}
2510
2511
/** Opcode 0x0f 0x12 - movlps Vq,Mq (mem form) / movhlps Vq,Uq (reg form). */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* movhlps: high qword of the source goes into the low qword of the destination. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* movlps: loads only the low qword; the high qword of the destination is preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2573
2574
2575/**
2576 * @opcode 0x12
2577 * @opcodesub !11 mr/reg
2578 * @oppfx 0x66
2579 * @opcpuid sse2
2580 * @opgroup og_sse2_pcksclr_datamove
2581 * @opxcpttype 5
2582 * @optest op1=1 op2=2 -> op1=2
2583 * @optest op1=0 op2=-42 -> op1=-42
2584 */
2585FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2586{
2587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2588 if (IEM_IS_MODRM_MEM_MODE(bRm))
2589 {
2590 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2591
2592 IEM_MC_BEGIN(0, 2);
2593 IEM_MC_LOCAL(uint64_t, uSrc);
2594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2595
2596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2598 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2599 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2600
2601 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2602 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2603
2604 IEM_MC_ADVANCE_RIP_AND_FINISH();
2605 IEM_MC_END();
2606 }
2607
2608 /**
2609 * @opdone
2610 * @opmnemonic ud660f12m3
2611 * @opcode 0x12
2612 * @opcodesub 11 mr/reg
2613 * @oppfx 0x66
2614 * @opunused immediate
2615 * @opcpuid sse
2616 * @optest ->
2617 */
2618 else
2619 IEMOP_RAISE_INVALID_OPCODE_RET();
2620}
2621
2622
2623/**
2624 * @opcode 0x12
2625 * @oppfx 0xf3
2626 * @opcpuid sse3
2627 * @opgroup og_sse3_pcksclr_datamove
2628 * @opxcpttype 4
2629 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2630 * op1=0x00000002000000020000000100000001
2631 */
2632FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2633{
2634 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2636 if (IEM_IS_MODRM_REG_MODE(bRm))
2637 {
2638 /*
2639 * XMM, XMM.
2640 */
2641 IEM_MC_BEGIN(0, 1);
2642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2643 IEM_MC_LOCAL(RTUINT128U, uSrc);
2644
2645 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2646 IEM_MC_PREPARE_SSE_USAGE();
2647
2648 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2649 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2650 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2651 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2652 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2653
2654 IEM_MC_ADVANCE_RIP_AND_FINISH();
2655 IEM_MC_END();
2656 }
2657 else
2658 {
2659 /*
2660 * XMM, [mem128].
2661 */
2662 IEM_MC_BEGIN(0, 2);
2663 IEM_MC_LOCAL(RTUINT128U, uSrc);
2664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2665
2666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2668 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2669 IEM_MC_PREPARE_SSE_USAGE();
2670
2671 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2672 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2673 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2674 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2675 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2676
2677 IEM_MC_ADVANCE_RIP_AND_FINISH();
2678 IEM_MC_END();
2679 }
2680}
2681
2682
2683/**
2684 * @opcode 0x12
2685 * @oppfx 0xf2
2686 * @opcpuid sse3
2687 * @opgroup og_sse3_pcksclr_datamove
2688 * @opxcpttype 5
2689 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2690 * op1=0x22222222111111112222222211111111
2691 */
2692FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2693{
2694 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2696 if (IEM_IS_MODRM_REG_MODE(bRm))
2697 {
2698 /*
2699 * XMM128, XMM64.
2700 */
2701 IEM_MC_BEGIN(1, 0);
2702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2703 IEM_MC_ARG(uint64_t, uSrc, 0);
2704
2705 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2706 IEM_MC_PREPARE_SSE_USAGE();
2707
2708 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2709 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2710 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2711
2712 IEM_MC_ADVANCE_RIP_AND_FINISH();
2713 IEM_MC_END();
2714 }
2715 else
2716 {
2717 /*
2718 * XMM128, [mem64].
2719 */
2720 IEM_MC_BEGIN(1, 1);
2721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2722 IEM_MC_ARG(uint64_t, uSrc, 0);
2723
2724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2726 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2727 IEM_MC_PREPARE_SSE_USAGE();
2728
2729 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2730 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2731 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2732
2733 IEM_MC_ADVANCE_RIP_AND_FINISH();
2734 IEM_MC_END();
2735 }
2736}
2737
2738
2739/**
2740 * @opcode 0x13
2741 * @opcodesub !11 mr/reg
2742 * @oppfx none
2743 * @opcpuid sse
2744 * @opgroup og_sse_simdfp_datamove
2745 * @opxcpttype 5
2746 * @optest op1=1 op2=2 -> op1=2
2747 * @optest op1=0 op2=-42 -> op1=-42
2748 */
2749FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2750{
2751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2752 if (IEM_IS_MODRM_MEM_MODE(bRm))
2753 {
2754 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2755
2756 IEM_MC_BEGIN(0, 2);
2757 IEM_MC_LOCAL(uint64_t, uSrc);
2758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2759
2760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2762 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2763 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2764
2765 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2766 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2767
2768 IEM_MC_ADVANCE_RIP_AND_FINISH();
2769 IEM_MC_END();
2770 }
2771
2772 /**
2773 * @opdone
2774 * @opmnemonic ud0f13m3
2775 * @opcode 0x13
2776 * @opcodesub 11 mr/reg
2777 * @oppfx none
2778 * @opunused immediate
2779 * @opcpuid sse
2780 * @optest ->
2781 */
2782 else
2783 IEMOP_RAISE_INVALID_OPCODE_RET();
2784}
2785
2786
2787/**
2788 * @opcode 0x13
2789 * @opcodesub !11 mr/reg
2790 * @oppfx 0x66
2791 * @opcpuid sse2
2792 * @opgroup og_sse2_pcksclr_datamove
2793 * @opxcpttype 5
2794 * @optest op1=1 op2=2 -> op1=2
2795 * @optest op1=0 op2=-42 -> op1=-42
2796 */
2797FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2798{
2799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2800 if (IEM_IS_MODRM_MEM_MODE(bRm))
2801 {
2802 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2803 IEM_MC_BEGIN(0, 2);
2804 IEM_MC_LOCAL(uint64_t, uSrc);
2805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2806
2807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2809 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2810 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2811
2812 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2813 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2814
2815 IEM_MC_ADVANCE_RIP_AND_FINISH();
2816 IEM_MC_END();
2817 }
2818
2819 /**
2820 * @opdone
2821 * @opmnemonic ud660f13m3
2822 * @opcode 0x13
2823 * @opcodesub 11 mr/reg
2824 * @oppfx 0x66
2825 * @opunused immediate
2826 * @opcpuid sse
2827 * @optest ->
2828 */
2829 else
2830 IEMOP_RAISE_INVALID_OPCODE_RET();
2831}
2832
2833
2834/**
2835 * @opmnemonic udf30f13
2836 * @opcode 0x13
2837 * @oppfx 0xf3
2838 * @opunused intel-modrm
2839 * @opcpuid sse
2840 * @optest ->
2841 * @opdone
2842 */
2843
2844/**
2845 * @opmnemonic udf20f13
2846 * @opcode 0x13
2847 * @oppfx 0xf2
2848 * @opunused intel-modrm
2849 * @opcpuid sse
2850 * @optest ->
2851 * @opdone
2852 */
2853
/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Combines the low halves of both operands via the shared SSE low/low worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2860
2861
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Combines the low halves of both operands via the shared SSE2 low/low worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2868
2869
2870/**
2871 * @opdone
2872 * @opmnemonic udf30f14
2873 * @opcode 0x14
2874 * @oppfx 0xf3
2875 * @opunused intel-modrm
2876 * @opcpuid sse
2877 * @optest ->
2878 * @opdone
2879 */
2880
2881/**
2882 * @opmnemonic udf20f14
2883 * @opcode 0x14
2884 * @oppfx 0xf2
2885 * @opunused intel-modrm
2886 * @opcpuid sse
2887 * @optest ->
2888 * @opdone
2889 */
2890
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Combines the high halves of both operands via the shared SSE high/high worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2897
2898
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Combines the high halves of both operands via the shared SSE2 high/high worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2905
2906
2907/* Opcode 0xf3 0x0f 0x15 - invalid */
2908/* Opcode 0xf2 0x0f 0x15 - invalid */
2909
2910/**
2911 * @opdone
2912 * @opmnemonic udf30f15
2913 * @opcode 0x15
2914 * @oppfx 0xf3
2915 * @opunused intel-modrm
2916 * @opcpuid sse
2917 * @optest ->
2918 * @opdone
2919 */
2920
2921/**
2922 * @opmnemonic udf20f15
2923 * @opcode 0x15
2924 * @oppfx 0xf2
2925 * @opunused intel-modrm
2926 * @opcpuid sse
2927 * @optest ->
2928 * @opdone
2929 */
2930
/** Opcode 0x0f 0x16 - movhps Vdq,Mq (mem form) / movlhps Vdq,Uq (reg form). */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* movlhps: low qword of the source goes into the high qword of the destination. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* movhps: loads only the high qword; the low qword of the destination is preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2992
2993
2994/**
2995 * @opcode 0x16
2996 * @opcodesub !11 mr/reg
2997 * @oppfx 0x66
2998 * @opcpuid sse2
2999 * @opgroup og_sse2_pcksclr_datamove
3000 * @opxcpttype 5
3001 * @optest op1=1 op2=2 -> op1=2
3002 * @optest op1=0 op2=-42 -> op1=-42
3003 */
3004FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3005{
3006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3007 if (IEM_IS_MODRM_MEM_MODE(bRm))
3008 {
3009 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3010 IEM_MC_BEGIN(0, 2);
3011 IEM_MC_LOCAL(uint64_t, uSrc);
3012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3013
3014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3016 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3017 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3018
3019 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3020 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3021
3022 IEM_MC_ADVANCE_RIP_AND_FINISH();
3023 IEM_MC_END();
3024 }
3025
3026 /**
3027 * @opdone
3028 * @opmnemonic ud660f16m3
3029 * @opcode 0x16
3030 * @opcodesub 11 mr/reg
3031 * @oppfx 0x66
3032 * @opunused immediate
3033 * @opcpuid sse
3034 * @optest ->
3035 */
3036 else
3037 IEMOP_RAISE_INVALID_OPCODE_RET();
3038}
3039
3040
3041/**
3042 * @opcode 0x16
3043 * @oppfx 0xf3
3044 * @opcpuid sse3
3045 * @opgroup og_sse3_pcksclr_datamove
3046 * @opxcpttype 4
3047 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3048 * op1=0x00000002000000020000000100000001
3049 */
3050FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3051{
3052 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3054 if (IEM_IS_MODRM_REG_MODE(bRm))
3055 {
3056 /*
3057 * XMM128, XMM128.
3058 */
3059 IEM_MC_BEGIN(0, 1);
3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3061 IEM_MC_LOCAL(RTUINT128U, uSrc);
3062
3063 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3064 IEM_MC_PREPARE_SSE_USAGE();
3065
3066 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3067 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3068 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3069 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3070 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3071
3072 IEM_MC_ADVANCE_RIP_AND_FINISH();
3073 IEM_MC_END();
3074 }
3075 else
3076 {
3077 /*
3078 * XMM128, [mem128].
3079 */
3080 IEM_MC_BEGIN(0, 2);
3081 IEM_MC_LOCAL(RTUINT128U, uSrc);
3082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3083
3084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3086 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3087 IEM_MC_PREPARE_SSE_USAGE();
3088
3089 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3090 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3091 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3092 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3093 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3094
3095 IEM_MC_ADVANCE_RIP_AND_FINISH();
3096 IEM_MC_END();
3097 }
3098}
3099
3100/**
3101 * @opdone
3102 * @opmnemonic udf30f16
3103 * @opcode 0x16
3104 * @oppfx 0xf2
3105 * @opunused intel-modrm
3106 * @opcpuid sse
3107 * @optest ->
3108 * @opdone
3109 */
3110
3111
/**
 * MOVHPS m64, xmm (0F 17, memory form): store the high quadword of the source
 * XMM register to memory.
 *
 * @opcode      0x17
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only reads SSE state; the XMM register is not modified. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();  /* register form (mod=11) is undefined */
}
3158
3159
3160/**
3161 * @opcode 0x17
3162 * @opcodesub !11 mr/reg
3163 * @oppfx 0x66
3164 * @opcpuid sse2
3165 * @opgroup og_sse2_pcksclr_datamove
3166 * @opxcpttype 5
3167 * @optest op1=1 op2=2 -> op1=2
3168 * @optest op1=0 op2=-42 -> op1=-42
3169 */
3170FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3171{
3172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3173 if (IEM_IS_MODRM_MEM_MODE(bRm))
3174 {
3175 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3176
3177 IEM_MC_BEGIN(0, 2);
3178 IEM_MC_LOCAL(uint64_t, uSrc);
3179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3180
3181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3183 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3185
3186 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3187 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3188
3189 IEM_MC_ADVANCE_RIP_AND_FINISH();
3190 IEM_MC_END();
3191 }
3192
3193 /**
3194 * @opdone
3195 * @opmnemonic ud660f17m3
3196 * @opcode 0x17
3197 * @opcodesub 11 mr/reg
3198 * @oppfx 0x66
3199 * @opunused immediate
3200 * @opcpuid sse
3201 * @optest ->
3202 */
3203 else
3204 IEMOP_RAISE_INVALID_OPCODE_RET();
3205}
3206
3207
3208/**
3209 * @opdone
3210 * @opmnemonic udf30f17
3211 * @opcode 0x17
3212 * @oppfx 0xf3
3213 * @opunused intel-modrm
3214 * @opcpuid sse
3215 * @optest ->
3216 * @opdone
3217 */
3218
3219/**
3220 * @opmnemonic udf20f17
3221 * @opcode 0x17
3222 * @oppfx 0xf2
3223 * @opunused intel-modrm
3224 * @opcpuid sse
3225 * @optest ->
3226 * @opdone
3227 */
3228
3229
/** Opcode 0x0f 0x18 - Group 16: prefetchNTA/T0/T1/T2 m8.
 *
 * Decodes the /r selector for the prefetch hints and emulates them as NOPs
 * (only the effective address is calculated, no access is performed).
 * The register forms (mod=11) raise \#UD.
 */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* /0../7 all handled above */
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3261
3262
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP (nop Ev).
 *
 * Decodes the ModR/M byte (including the effective address for memory forms,
 * so segment/paging faults from decoding side-effects match hardware ordering
 * of the address calculation) but performs no operation.
 */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3287
3288
/** Opcode 0x0f 0x20 - mov Rd, Cd (read control register).
 *
 * Forces the effective operand size to 64-bit (long mode) or 32-bit; a LOCK
 * prefix selects CR8 on CPUs with that quirk (AMD-style CR8 access from
 * 32-bit code).  Only CR0/2/3/4/8 are valid; anything else is \#UD.
 * The actual read is deferred to iemCImpl_mov_Rd_Cd.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3320
3321
/** Opcode 0x0f 0x21 - mov Rd, Dd (read debug register).
 *
 * REX.R is invalid here (no DR8..DR15); the actual read is deferred to
 * iemCImpl_mov_Rd_Dd.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3333
3334
/** Opcode 0x0f 0x22 - mov Cd, Rd (write control register).
 *
 * Mirrors iemOp_mov_Rd_Cd decoding: forced 64-/32-bit operand size, LOCK
 * prefix selects CR8 on CPUs with that quirk, only CR0/2/3/4/8 are valid.
 * Writes to CR0/CR3/CR4 can change the execution mode, hence the extra
 * IEM_CIMPL_F_MODE flag on that path; CR2/CR8 writes cannot.
 */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    /* CR2 and CR8 writes don't affect the CPU mode; CR0/3/4 may. */
    if (iCrReg & (2 | 8))
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
    else
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3371
3372
/** Opcode 0x0f 0x23 - mov Dd, Rd (write debug register).
 *
 * REX.R is invalid here (no DR8..DR15); the actual write is deferred to
 * iemCImpl_mov_Dd_Rd.
 */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3384
3385
/** Opcode 0x0f 0x24 - mov Rd, Td (read test register).
 *
 * Test registers only exist on pre-Pentium CPUs; on Pentium and later this
 * opcode raises \#UD.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3397
3398
/** Opcode 0x0f 0x26 - mov Td, Rd (write test register).
 *
 * Test registers only exist on pre-Pentium CPUs; on Pentium and later this
 * opcode raises \#UD.
 */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3410
3411
/**
 * MOVAPS xmm, xmm/m128 (0F 28): aligned 128-bit load/register copy.
 *
 * @opcode      0x28
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  Source must be 16-byte aligned (#GP otherwise).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3460
/**
 * MOVAPD xmm, xmm/m128 (66 0F 28): aligned 128-bit load/register copy.
 *
 * @opcode      0x28
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  Source must be 16-byte aligned (#GP otherwise).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3509
3510/* Opcode 0xf3 0x0f 0x28 - invalid */
3511/* Opcode 0xf2 0x0f 0x28 - invalid */
3512
/**
 * MOVAPS xmm/m128, xmm (0F 29): aligned 128-bit store/register copy.
 *
 * @opcode      0x29
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.  Note the reversed operand order vs. 0F 28.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  Destination must be 16-byte aligned.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3561
/**
 * MOVAPD xmm/m128, xmm (66 0F 29): aligned 128-bit store/register copy.
 *
 * @opcode      0x29
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.  Note the reversed operand order vs. 66 0F 28.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  Destination must be 16-byte aligned.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3610
3611/* Opcode 0xf3 0x0f 0x29 - invalid */
3612/* Opcode 0xf2 0x0f 0x29 - invalid */
3613
3614
3615/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3616FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3617{
3618 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3620 if (IEM_IS_MODRM_REG_MODE(bRm))
3621 {
3622 /*
3623 * XMM, MMX
3624 */
3625 IEM_MC_BEGIN(3, 1);
3626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3627 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3628 IEM_MC_LOCAL(X86XMMREG, Dst);
3629 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3630 IEM_MC_ARG(uint64_t, u64Src, 2);
3631 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3632 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3633 IEM_MC_PREPARE_FPU_USAGE();
3634 IEM_MC_FPU_TO_MMX_MODE();
3635
3636 IEM_MC_REF_MXCSR(pfMxcsr);
3637 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3638 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3639
3640 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3641 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3642 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3643 } IEM_MC_ELSE() {
3644 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3645 } IEM_MC_ENDIF();
3646
3647 IEM_MC_ADVANCE_RIP_AND_FINISH();
3648 IEM_MC_END();
3649 }
3650 else
3651 {
3652 /*
3653 * XMM, [mem64]
3654 */
3655 IEM_MC_BEGIN(3, 2);
3656 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3657 IEM_MC_LOCAL(X86XMMREG, Dst);
3658 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3659 IEM_MC_ARG(uint64_t, u64Src, 2);
3660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3661
3662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3665 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3666 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3667
3668 IEM_MC_PREPARE_FPU_USAGE();
3669 IEM_MC_FPU_TO_MMX_MODE();
3670 IEM_MC_REF_MXCSR(pfMxcsr);
3671
3672 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3673 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3674 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3675 } IEM_MC_ELSE() {
3676 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3677 } IEM_MC_ENDIF();
3678
3679 IEM_MC_ADVANCE_RIP_AND_FINISH();
3680 IEM_MC_END();
3681 }
3682}
3683
3684
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi
 *
 * Converts two packed int32 values from an MMX register or m64 to two
 * double-precision floats filling the whole 128-bit destination, so unlike
 * cvtpi2ps no prior fetch of the destination register is required.
 */
FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
{
    IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX register source puts the FPU in MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        /* Doesn't cause a transition to MMX mode. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3752
3753
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey
 *
 * Converts a signed 32-/64-bit integer (register or memory, width selected by
 * REX.W) to a single-precision float in the low dword of the destination XMM
 * register.  MXCSR is updated and a pending SIMD FP exception is raised
 * before the result is committed.
 */
FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
{
    IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG(const int64_t *, pi64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG(const int32_t *, pi32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3877
3878
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey
 *
 * Converts a signed 32-/64-bit integer (register or memory, width selected by
 * REX.W) to a double-precision float in the low qword of the destination XMM
 * register.  MXCSR is updated and a pending SIMD FP exception is raised
 * before the result is committed.
 */
FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
{
    IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int64_t *, pi64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int32_t *, pi32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4002
4003
/**
 * MOVNTPS m128, xmm (0F 2B): non-temporal aligned 128-bit store.  Implemented
 * here as a plain aligned store; the non-temporal hint has no effect in IEM.
 *
 * @opcode      0x2b
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
4042
/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    /* MOVNTPD: SSE2 twin of MOVNTPS above - identical 128-bit aligned store,
       differing only in the CPUID gate (fSse2) and mnemonic metadata.
       The register-register encoding is #UD. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); /* NOTE(review): store destination address, see movntps note. */

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */
4083
4084
/**
 * Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps
 *
 * Converts the two packed single-precision values in the low qword of the
 * XMM/mem source to two signed 32-bit integers in an MMX register, using
 * truncation.  The result is only committed when no unmasked SIMD FP
 * exception is pending in MXCSR.
 */
FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 1);
        /* NOTE(review): cvttps2pi is an SSE1 instruction but is gated here with
           fSse2 - verify against the Intel SDM CPUID feature flag (SSE). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX destination: enters MMX mode (tags/TOS). */

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Source is the low qword (two packed singles) of the XMM register. */
        IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 64-bit memory operand: two packed single-precision values. */
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4150
4151
/**
 * Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd
 *
 * Converts the two packed double-precision values in the 128-bit XMM/mem
 * source to two signed 32-bit integers in an MMX register, using truncation.
 * The MMX destination is only written when no unmasked SIMD FP exception is
 * pending in MXCSR.
 */
FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX destination: enters MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Full 128 bits needed (two doubles), so reference the XMM register
           directly rather than fetching a qword. */
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 128-bit memory operand with SSE alignment check (#GP if unaligned). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4219
4220
/**
 * Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss
 *
 * Converts a scalar single-precision value (low dword of XMM or a 32-bit
 * memory operand) to a signed integer with truncation.  REX.W selects a
 * 64-bit general register destination, otherwise 32-bit.  The destination
 * is only written when no unmasked SIMD FP exception is pending.
 */
FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
{
    /* NOTE(review): the second operand is declared Wsd below although the
       instruction reads a single-precision (Wss) source - looks copy-pasted
       from the cvttsd2si variant; confirm against the mnemonic tables. */
    IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* Low dword of the source XMM register holds the scalar single. */
            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* 32-bit (single-precision) memory operand, no alignment restriction. */
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg, [mem] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4344
4345
/**
 * Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd
 *
 * Converts a scalar double-precision value (low qword of XMM or a 64-bit
 * memory operand) to a signed integer with truncation.  REX.W selects a
 * 64-bit general register destination, otherwise 32-bit.  The destination
 * is only written when no unmasked SIMD FP exception is pending.
 */
FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* Low qword of the source XMM register holds the scalar double. */
            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* 64-bit (double-precision) memory operand, no alignment restriction. */
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4469
4470
/**
 * Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps
 *
 * Like cvttps2pi above, but rounds according to MXCSR.RC instead of
 * truncating: converts two packed singles from the low qword of the
 * XMM/mem source to two signed 32-bit integers in an MMX register.
 */
FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 1);
        /* NOTE(review): cvtps2pi is an SSE1 instruction but is gated here with
           fSse2 - verify against the Intel SDM CPUID feature flag (SSE). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX destination: enters MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Source is the low qword (two packed singles) of the XMM register. */
        IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 64-bit memory operand: two packed single-precision values. */
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4537
4538
/**
 * Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd
 *
 * Like cvttpd2pi above, but rounds according to MXCSR.RC instead of
 * truncating: converts two packed doubles from the 128-bit XMM/mem source
 * to two signed 32-bit integers in an MMX register.
 */
FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX destination: enters MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Full 128 bits needed (two doubles), reference the XMM register. */
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 128-bit memory operand with SSE alignment check (#GP if unaligned). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4607
4608
/**
 * Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss
 *
 * Like cvttss2si above, but rounds according to MXCSR.RC instead of
 * truncating.  REX.W selects a 64-bit general register destination,
 * otherwise 32-bit.
 */
FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
{
    /* NOTE(review): the second operand is declared Wsd below although the
       instruction reads a single-precision (Wss) source - looks copy-pasted
       from the cvtsd2si variant; confirm against the mnemonic tables. */
    IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* Low dword of the source XMM register holds the scalar single. */
            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* 32-bit (single-precision) memory operand, no alignment restriction. */
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg, [mem] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4732
4733
/**
 * Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd
 *
 * Like cvttsd2si above, but rounds according to MXCSR.RC instead of
 * truncating.  REX.W selects a 64-bit general register destination,
 * otherwise 32-bit.
 */
FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* Low qword of the source XMM register holds the scalar double. */
            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* 64-bit (double-precision) memory operand, no alignment restriction. */
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4857
4858
/**
 * Opcode 0x0f 0x2e - ucomiss Vss, Wss
 *
 * Unordered compare of two scalar singles, setting ZF/PF/CF in EFLAGS from
 * the result.  EFLAGS is only committed when no unmasked SIMD FP exception
 * is pending in MXCSR; otherwise the pending exception is raised.
 */
FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags); /* worker updates the local copy; committed below only if no #XF pends */
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low dword (the scalar single) is read from memory. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4926
4927
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd
 * Unordered scalar double compare; the assembly worker receives MXCSR and a
 * copy of EFLAGS by reference and computes the new flag state. */
FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the updated EFLAGS only when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low qword of the source operand is read from memory. */
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the updated EFLAGS only when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4995
4996
4997/* Opcode 0xf3 0x0f 0x2e - invalid */
4998/* Opcode 0xf2 0x0f 0x2e - invalid */
4999
5000
/** Opcode 0x0f 0x2f - comiss Vss, Wss
 * Ordered scalar single compare; the assembly worker receives MXCSR and a
 * copy of EFLAGS by reference and computes the new flag state. */
FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the updated EFLAGS only when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low dword of the source operand is read from memory. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the updated EFLAGS only when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5068
5069
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd
 * Ordered scalar double compare; the assembly worker receives MXCSR and a
 * copy of EFLAGS by reference and computes the new flag state. */
FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the updated EFLAGS only when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low qword of the source operand is read from memory. */
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the updated EFLAGS only when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5137
5138
5139/* Opcode 0xf3 0x0f 0x2f - invalid */
5140/* Opcode 0xf2 0x0f 0x2f - invalid */
5141
/** Opcode 0x0f 0x30 - WRMSR. Deferred to the C implementation; may cause a
 *  VM-exit (IEM_CIMPL_F_VMEXIT). */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wrmsr);
}
5149
5150
/** Opcode 0x0f 0x31 - RDTSC. Deferred to the C implementation; may cause a
 *  VM-exit (IEM_CIMPL_F_VMEXIT). */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtsc);
}
5158
5159
/** Opcode 0x0f 0x32 - RDMSR. (Comment fixed: RDMSR is 0F 32, not 0F 33.)
 *  Deferred to the C implementation; may cause a VM-exit (IEM_CIMPL_F_VMEXIT). */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdmsr);
}
5167
5168
/** Opcode 0x0f 0x33 - RDPMC. (Comment fixed: RDPMC is 0F 33, not 0F 34.)
 *  Deferred to the C implementation; may cause a VM-exit (IEM_CIMPL_F_VMEXIT). */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdpmc);
}
5176
5177
/** Opcode 0x0f 0x34 - SYSENTER. Far indirect branch that changes CPU mode and
 *  RFLAGS, may VM-exit, and always ends the current translation block (see the
 *  IEM_CIMPL_F_* flags below). */
FNIEMOP_DEF(iemOp_sysenter)
{
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_sysenter);
}
5187
/** Opcode 0x0f 0x35 - SYSEXIT. Far indirect branch that changes CPU mode and
 *  RFLAGS, may VM-exit, and always ends the current translation block.  The
 *  effective operand size is passed to the C implementation. */
FNIEMOP_DEF(iemOp_sysexit)
{
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
5197
/** Opcode 0x0f 0x37 - GETSEC. Not implemented (stub). */
FNIEMOP_STUB(iemOp_getsec);
5200
5201
/** Opcode 0x0f 0x38 - escape to the three-byte 0F 38 opcode map. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* The dispatch table holds four entries per opcode byte, one per
       operand-size/repeat prefix combination (idxPrefix). */
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
5213
5214
/** Opcode 0x0f 0x3a - escape to the three-byte 0F 3A opcode map. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* The dispatch table holds four entries per opcode byte, one per
       operand-size/repeat prefix combination (idxPrefix). */
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
5226
5227
5228/**
5229 * Implements a conditional move.
5230 *
5231 * Wish there was an obvious way to do this where we could share and reduce
5232 * code bloat.
5233 *
5234 * @param a_Cnd The conditional "microcode" operation.
5235 */
5236#define CMOV_X(a_Cnd) \
5237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5238 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5239 { \
5240 switch (pVCpu->iem.s.enmEffOpSize) \
5241 { \
5242 case IEMMODE_16BIT: \
5243 IEM_MC_BEGIN(0, 1); \
5244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5245 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5246 a_Cnd { \
5247 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5248 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5249 } IEM_MC_ENDIF(); \
5250 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5251 IEM_MC_END(); \
5252 break; \
5253 \
5254 case IEMMODE_32BIT: \
5255 IEM_MC_BEGIN(0, 1); \
5256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5257 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5258 a_Cnd { \
5259 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5260 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5261 } IEM_MC_ELSE() { \
5262 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5263 } IEM_MC_ENDIF(); \
5264 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5265 IEM_MC_END(); \
5266 break; \
5267 \
5268 case IEMMODE_64BIT: \
5269 IEM_MC_BEGIN(0, 1); \
5270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5271 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5272 a_Cnd { \
5273 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5274 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5275 } IEM_MC_ENDIF(); \
5276 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5277 IEM_MC_END(); \
5278 break; \
5279 \
5280 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5281 } \
5282 } \
5283 else \
5284 { \
5285 switch (pVCpu->iem.s.enmEffOpSize) \
5286 { \
5287 case IEMMODE_16BIT: \
5288 IEM_MC_BEGIN(0, 2); \
5289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5290 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5293 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5294 a_Cnd { \
5295 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5296 } IEM_MC_ENDIF(); \
5297 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5298 IEM_MC_END(); \
5299 break; \
5300 \
5301 case IEMMODE_32BIT: \
5302 IEM_MC_BEGIN(0, 2); \
5303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5304 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5307 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5308 a_Cnd { \
5309 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5310 } IEM_MC_ELSE() { \
5311 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5312 } IEM_MC_ENDIF(); \
5313 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5314 IEM_MC_END(); \
5315 break; \
5316 \
5317 case IEMMODE_64BIT: \
5318 IEM_MC_BEGIN(0, 2); \
5319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5320 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5323 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5324 a_Cnd { \
5325 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5326 } IEM_MC_ENDIF(); \
5327 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5328 IEM_MC_END(); \
5329 break; \
5330 \
5331 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5332 } \
5333 } do {} while (0)
5334
5335
5336
/** Opcode 0x0f 0x40 - CMOVO: move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - CMOVNO: move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - CMOVC/CMOVB: move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - CMOVNC/CMOVAE: move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - CMOVE/CMOVZ: move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - CMOVNE/CMOVNZ: move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - CMOVBE: move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - CMOVNBE/CMOVA: move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - CMOVS: move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - CMOVNS: move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - CMOVP: move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - CMOVNP: move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - CMOVL: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - CMOVNL/CMOVGE: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - CMOVLE: move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - CMOVNLE/CMOVG: move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
5463
5464#undef CMOV_X
5465
/** Opcode 0x0f 0x50 - movmskps Gy, Ups
 * Register form only; the mod=mem encoding is invalid.  The worker's byte
 * result is stored zero-extended into the destination GPR. */
FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
5493
5494
5495/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5496FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5497{
5498 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5500 if (IEM_IS_MODRM_REG_MODE(bRm))
5501 {
5502 /*
5503 * Register, register.
5504 */
5505 IEM_MC_BEGIN(2, 1);
5506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5507 IEM_MC_LOCAL(uint8_t, u8Dst);
5508 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5509 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5510 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5511 IEM_MC_PREPARE_SSE_USAGE();
5512 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5513 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5514 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG_8(bRm), u8Dst);
5515 IEM_MC_ADVANCE_RIP_AND_FINISH();
5516 IEM_MC_END();
5517 }
5518 /* No memory operand. */
5519 else
5520 IEMOP_RAISE_INVALID_OPCODE_RET();
5521
5522}
5523
5524
5525/* Opcode 0xf3 0x0f 0x50 - invalid */
5526/* Opcode 0xf2 0x0f 0x50 - invalid */
5527
5528
/** Opcode 0x0f 0x51 - sqrtps Vps, Wps (packed single, full 128-bit worker). */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}


/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd (packed double, full 128-bit worker). */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}


/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss (scalar; worker operates on the low r32). */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd (scalar; worker operates on the low r64). */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}
5559
5560
/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps (packed reciprocal square root estimate). */
FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
}


/* Opcode 0x66 0x0f 0x52 - invalid */


/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss (scalar; worker operates on the low r32). */
FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
}
5578
5579
5580/* Opcode 0xf2 0x0f 0x52 - invalid */
5581
/** Opcode 0x0f 0x53 - rcpps Vps, Wps - not implemented (stub). */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss - not implemented (stub). */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */
5588
5589
/** Opcode 0x0f 0x54 - andps Vps, Wps
 * Bitwise ops are type-agnostic: the ps/pd forms share the integer workers. */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd (same PAND worker as ANDPS). */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */


/** Opcode 0x0f 0x55 - andnps Vps, Wps (shares the PANDN worker). */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd (same PANDN worker as ANDNPS). */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */


/** Opcode 0x0f 0x56 - orps Vps, Wps (shares the POR worker). */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd (same POR worker as ORPS). */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */


/** Opcode 0x0f 0x57 - xorps Vps, Wps (shares the PXOR worker). */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd (same PXOR worker as XORPS). */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
5664
5665
5666/* Opcode 0xf3 0x0f 0x57 - invalid */
5667/* Opcode 0xf2 0x0f 0x57 - invalid */
5668
/** Opcode 0x0f 0x58 - addps Vps, Wps (packed single add, full 128-bit worker). */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd (packed double add). */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss (scalar; worker operates on the low r32). */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd (scalar; worker operates on the low r64). */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}


/** Opcode 0x0f 0x59 - mulps Vps, Wps (packed single multiply). */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd (packed double multiply). */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss (scalar; worker operates on the low r32). */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd (scalar; worker operates on the low r64). */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}
5731
5732
/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
}


/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
}


/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    /* NOTE(review): CVTSS2SD is architecturally an SSE2 instruction, but this
       dispatches via the SSE (not SSE2) scalar-r32 worker path - verify the
       CPUID gating inside iemOpCommonSseFp_FullR32_To_Full matches. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}


/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
}


/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
}


/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps (truncating conversion). */
FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
}
5787
5788
5789/* Opcode 0xf2 0x0f 0x5b - invalid */
5790
5791
/** Opcode 0x0f 0x5c - subps Vps, Wps (packed single subtract). */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd (packed double subtract). */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss (scalar; worker operates on the low r32). */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd (scalar; worker operates on the low r64). */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}
5822
5823
/** Opcode 0x0f 0x5d - minps Vps, Wps (packed single minimum). */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd (packed double minimum). */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss (scalar; worker operates on the low r32). */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd (scalar; worker operates on the low r64). */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}
5854
5855
/** Opcode 0x0f 0x5e - divps Vps, Wps (packed single divide). */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd (packed double divide). */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss (scalar; worker operates on the low r32). */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd (scalar; worker operates on the low r64). */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}
5886
5887
/** Opcode 0x0f 0x5f - maxps Vps, Wps (packed single maximum). */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd (packed double maximum). */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss (scalar; worker operates on the low r32). */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd (scalar; worker operates on the low r64). */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}
5918
5919
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd
 * Interleave low bytes of the two MMX operands into the destination. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}
5926
5927
/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx
 * (Header previously said "Vx, W"; fixed to match the mnemonic below.) */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}
5934
5935
5936/* Opcode 0xf3 0x0f 0x60 - invalid */
5937
5938
/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd
 * Interleave low words of the two MMX operands. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}
5946
5947
/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx
 * SSE2 variant: interleave low words of the two XMM operands. */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}
5954
5955
5956/* Opcode 0xf3 0x0f 0x61 - invalid */
5957
5958
/** Opcode 0x0f 0x62 - punpckldq Pq, Qd
 * Interleave low doublewords of the two MMX operands. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}
5965
5966
/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx
 * SSE2 variant: interleave low doublewords of the two XMM operands. */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}
5973
5974
5975/* Opcode 0xf3 0x0f 0x62 - invalid */
5976
5977
5978
/** Opcode 0x0f 0x63 - packsswb Pq, Qq
 * NOTE(review): the mnemonic macro uses Qd while the function name and this
 * header say Qq; confirm which operand form the stats tables expect. */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}
5985
5986
/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx
 * Pack signed words to signed-saturated bytes (SSE2 optimized worker). */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}
5993
5994
5995/* Opcode 0xf3 0x0f 0x63 - invalid */
5996
5997
/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq
 * Packed compare-greater-than on signed bytes (MMX). */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}
6004
6005
/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx
 * Packed compare-greater-than on signed bytes (SSE2). */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}
6012
6013
6014/* Opcode 0xf3 0x0f 0x64 - invalid */
6015
6016
/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq
 * Packed compare-greater-than on signed words (MMX). */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}
6023
6024
/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx
 * Packed compare-greater-than on signed words (SSE2). */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}
6031
6032
6033/* Opcode 0xf3 0x0f 0x65 - invalid */
6034
6035
/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq
 * Packed compare-greater-than on signed doublewords (MMX). */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}
6042
6043
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx
 * Packed compare-greater-than on signed doublewords (SSE2). */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}
6050
6051
6052/* Opcode 0xf3 0x0f 0x66 - invalid */
6053
6054
/** Opcode 0x0f 0x67 - packuswb Pq, Qq
 * NOTE(review): mnemonic macro uses Qd while the function name says Qq;
 * confirm intended operand form (same pattern as packsswb above). */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}
6061
6062
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx
 * Pack signed words to unsigned-saturated bytes (SSE2 optimized worker). */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
}
6069
6070
6071/* Opcode 0xf3 0x0f 0x67 - invalid */
6072
6073
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * Interleave high bytes of the two MMX operands.
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}
6083
6084
/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx
 * SSE2 variant: interleave high bytes of the two XMM operands. */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}
6091
6092
6093/* Opcode 0xf3 0x0f 0x68 - invalid */
6094
6095
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * Interleave high words of the two MMX operands.
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}
6105
6106
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx
 * (Header previously said "Vx, Hx, Wx" - the Hx operand only exists in the
 * VEX-encoded form; this legacy SSE2 form has two operands.) */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);

}
6114
6115
6116/* Opcode 0xf3 0x0f 0x69 - invalid */
6117
6118
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * Interleave high doublewords of the two MMX operands.
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}
6128
6129
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx
 * SSE2 variant: interleave high doublewords of the two XMM operands. */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
6136
6137
6138/* Opcode 0xf3 0x0f 0x6a - invalid */
6139
6140
/** Opcode 0x0f 0x6b - packssdw Pq, Qd
 * Pack signed doublewords to signed-saturated words (MMX). */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}
6147
6148
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx
 * Pack signed doublewords to signed-saturated words (SSE2 optimized worker). */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
6155
6156
6157/* Opcode 0xf3 0x0f 0x6b - invalid */
6158
6159
6160/* Opcode 0x0f 0x6c - invalid */
6161
6162
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx
 * Interleave low quadwords; SSE2 only (no MMX form of this opcode). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}
6169
6170
6171/* Opcode 0xf3 0x0f 0x6c - invalid */
6172/* Opcode 0xf2 0x0f 0x6c - invalid */
6173
6174
6175/* Opcode 0x0f 0x6d - invalid */
6176
6177
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx
 * Interleave high quadwords; SSE2 only (no MMX form of this opcode). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
6184
6185
6186/* Opcode 0xf3 0x0f 0x6d - invalid */
6187
6188
/** Opcode 0x0f 0x6e - movd/movq Pd, Ey.
 * REX.W selects between the 64-bit MOVQ mm,r/m64 form and the 32-bit
 * MOVD mm,r/m32 form (the latter zero-extends into the 64-bit MMX reg). */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            /* Source is a general register (REX.R applies), dest an MMX reg (3 bits). */
            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg32 */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            /* 32-bit load is zero-extended into the 64-bit MMX register. */
            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6295
/** Opcode 0x66 0x0f 0x6e - movd/movq Vy, Ey.
 * REX.W selects between MOVQ xmm,r/m64 and MOVD xmm,r/m32; both
 * zero-extend the loaded value into the full 128-bit XMM register. */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6398
6399/* Opcode 0xf3 0x0f 0x6e - invalid */
6400
6401
6402/**
6403 * @opcode 0x6f
6404 * @oppfx none
6405 * @opcpuid mmx
6406 * @opgroup og_mmx_datamove
6407 * @opxcpttype 5
6408 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6409 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6410 */
6411FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6412{
6413 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6415 if (IEM_IS_MODRM_REG_MODE(bRm))
6416 {
6417 /*
6418 * Register, register.
6419 */
6420 IEM_MC_BEGIN(0, 1);
6421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6422 IEM_MC_LOCAL(uint64_t, u64Tmp);
6423
6424 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6425 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6426 IEM_MC_FPU_TO_MMX_MODE();
6427
6428 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6429 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6430
6431 IEM_MC_ADVANCE_RIP_AND_FINISH();
6432 IEM_MC_END();
6433 }
6434 else
6435 {
6436 /*
6437 * Register, memory.
6438 */
6439 IEM_MC_BEGIN(0, 2);
6440 IEM_MC_LOCAL(uint64_t, u64Tmp);
6441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6442
6443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6445 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6446 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6447 IEM_MC_FPU_TO_MMX_MODE();
6448
6449 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6450 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6451
6452 IEM_MC_ADVANCE_RIP_AND_FINISH();
6453 IEM_MC_END();
6454 }
6455}
6456
/**
 * @opcode      0x6f
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * MOVDQA xmm1, xmm2/m128 - aligned 128-bit load/copy; the memory form
 * enforces 16-byte alignment (ALIGN_SSE fetch, exception type 1).
 */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6507
/**
 * @opcode      0x6f
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * MOVDQU xmm1, xmm2/m128 - unaligned 128-bit load/copy; unlike movdqa
 * the memory fetch is done without alignment checking.
 */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6555
6556
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib
 * Shuffle packed words per the imm8 selector.  Requires SSE or the AMD
 * MMX extensions (see the DONE_DECODING..._2_OR check below). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Note: cbImm=1 here so the effective address accounts for the
           trailing immediate byte fetched next. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6614
6615
/**
 * Common worker for SSE2 instructions on the forms:
 *      pshufd  xmm1, xmm2/mem128, imm8
 *      pshufhw xmm1, xmm2/mem128, imm8
 *      pshuflw xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @param   pfnWorker   The u128 shuffle implementation to invoke with
 *                      (dst, src, imm8).
 */
FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* cbImm=1: the imm8 follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6673
6674
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib
 * Dispatches to the common SSE2 pshufXX worker above. */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}
6681
6682
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib
 * Dispatches to the common SSE2 pshufXX worker above. */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}
6689
6690
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib
 * Dispatches to the common SSE2 pshufXX worker above. */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
6697
6698
/**
 * Common worker for MMX instructions of the form:
 *      psrlw   mm, imm8
 *      psraw   mm, imm8
 *      psllw   mm, imm8
 *      psrld   mm, imm8
 *      psrad   mm, imm8
 *      pslld   mm, imm8
 *      psrlq   mm, imm8
 *      psllq   mm, imm8
 *
 * Only valid in register form; the group dispatchers (Grp12/13/14) route
 * memory forms to the invalid-opcode handler before calling here.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pfnU64  The u64 shift implementation to invoke with (dst, imm8).
 */
FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory not supported.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
6743
6744
/**
 * Common worker for SSE2 instructions of the form:
 *      psrlw   xmm, imm8
 *      psraw   xmm, imm8
 *      psllw   xmm, imm8
 *      psrld   xmm, imm8
 *      psrad   xmm, imm8
 *      pslld   xmm, imm8
 *      psrlq   xmm, imm8
 *      psllq   xmm, imm8
 *
 * Only valid in register form; the group dispatchers (Grp12/13/14) route
 * memory forms to the invalid-opcode handler before calling here.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pfnU128 The u128 shift implementation to invoke with (dst, imm8).
 */
FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
6785
6786
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib
 * Logical right shift of packed words (MMX). */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}
6793
6794
/** Opcode 0x66 0x0f 0x71 11/2.
 * psrlw Ux, Ib - logical right shift of packed words (SSE2). */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}
6801
6802
/** Opcode 0x0f 0x71 11/4.
 * psraw Nq, Ib - arithmetic right shift of packed words (MMX). */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}
6809
6810
/** Opcode 0x66 0x0f 0x71 11/4.
 * psraw Ux, Ib - arithmetic right shift of packed words (SSE2). */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}
6817
6818
/** Opcode 0x0f 0x71 11/6.
 * psllw Nq, Ib - left shift of packed words (MMX). */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}
6825
6826
/** Opcode 0x66 0x0f 0x71 11/6.
 * psllw Ux, Ib - left shift of packed words (SSE2). */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
6833
6834
/**
 * Group 12 jump table for register variant.
 * Indexed by /reg (ModR/M bits 5:3) * 4 + mandatory-prefix index
 * (none / 0x66 / 0xf3 / 0xf2), hence 8*4 entries.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6850
6851
/** Opcode 0x0f 0x71.
 * Group 12 dispatcher: register forms go through the jump table
 * (indexed by /reg and the mandatory prefix); memory forms are invalid. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6862
6863
/** Opcode 0x0f 0x72 11/2.
 * psrld Nq, Ib - logical right shift of packed doublewords (MMX). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}
6870
6871
/** Opcode 0x66 0x0f 0x72 11/2.
 * psrld Ux, Ib - logical right shift of packed doublewords (SSE2). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}
6878
6879
/** Opcode 0x0f 0x72 11/4.
 * psrad Nq, Ib - arithmetic right shift of packed doublewords (MMX). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}
6886
6887
/** Opcode 0x66 0x0f 0x72 11/4.
 * psrad Ux, Ib - arithmetic right shift of packed doublewords (SSE2). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}
6894
6895
/** Opcode 0x0f 0x72 11/6.
 * pslld Nq, Ib - left shift of packed doublewords (MMX). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}
6902
/** Opcode 0x66 0x0f 0x72 11/6.
 * pslld Ux, Ib - left shift of packed doublewords (SSE2). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
6909
6910
/**
 * Group 13 jump table for register variant.
 * Indexed by /reg (ModR/M bits 5:3) * 4 + mandatory-prefix index
 * (none / 0x66 / 0xf3 / 0xf2), hence 8*4 entries.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6926
/** Opcode 0x0f 0x72. */
FNIEMOP_DEF(iemOp_Grp13)
{
    /* Group 13: the actual instruction is selected by ModRM.reg and the
       operand prefix (see g_apfnGroup13RegReg). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory-operand forms of this group are invalid encodings. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6937
6938
/** Opcode 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX PSRLQ mm, imm8: logical right shift of the quadword; handled by the
       common MMX shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}
6945
6946
/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 PSRLQ xmm, imm8: logical right shift of packed quadwords; handled
       by the common SSE2 shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}
6953
6954
/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 PSRLDQ xmm, imm8: byte-wise right shift of the whole 128-bit
       register (SSE only, no MMX counterpart). */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}
6961
6962
/** Opcode 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX PSLLQ mm, imm8: logical left shift of the quadword; handled by the
       common MMX shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}
6969
6970
/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 PSLLQ xmm, imm8: logical left shift of packed quadwords; handled
       by the common SSE2 shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}
6977
6978
/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 PSLLDQ xmm, imm8: byte-wise left shift of the whole 128-bit
       register (SSE only, no MMX counterpart). */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
6985
6986/**
6987 * Group 14 jump table for register variant.
6988 */
6989IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6990{
6991 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6992 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6993 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6994 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6995 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6996 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6997 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6998 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6999};
7000AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
7001
7002
/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    /* Group 14: the actual instruction is selected by ModRM.reg and the
       operand prefix (see g_apfnGroup14RegReg). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory-operand forms of this group are invalid encodings. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
7013
7014
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX packed byte compare-for-equality; the common full/full worker does
       the decoding, exception checks and register/memory handling. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}
7021
7022
/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 packed byte compare-for-equality via the common full/full worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}
7029
7030
7031/* Opcode 0xf3 0x0f 0x74 - invalid */
7032/* Opcode 0xf2 0x0f 0x74 - invalid */
7033
7034
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX packed word compare-for-equality via the common full/full worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}
7041
7042
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 packed word compare-for-equality via the common full/full worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}
7049
7050
7051/* Opcode 0xf3 0x0f 0x75 - invalid */
7052/* Opcode 0xf2 0x0f 0x75 - invalid */
7053
7054
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX packed dword compare-for-equality via the common full/full worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}
7061
7062
/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 packed dword compare-for-equality via the common full/full worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}
7069
7070
7071/* Opcode 0xf3 0x0f 0x76 - invalid */
7072/* Opcode 0xf2 0x0f 0x76 - invalid */
7073
7074
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* EMMS may raise #NM (CR0.TS/EM) and pending x87 exceptions before
       touching the FPU state. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();     /* switch the FPU/MMX unit back to x87 mode */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7088
7089/* Opcode 0x66 0x0f 0x77 - invalid */
7090/* Opcode 0xf3 0x0f 0x77 - invalid */
7091/* Opcode 0xf2 0x0f 0x77 - invalid */
7092
/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    /* Reads a VMCS field: the field encoding comes from the reg operand (Gy),
       the value is written to the r/m operand (Ey, register or memory).
       Outside 64-bit mode the operand size is forced to 32 bits. */
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t,   u64Enc,  1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            /* 32-bit: the destination is still referenced as a 64-bit GREG;
               the reg32 C implementation handles the width. */
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t,   u32Enc,  1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc,   2);
            /* Effective address must be calculated before decoding completes. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif
7169
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);   /* not implemented yet */
7172/* Opcode 0xf3 0x0f 0x78 - invalid */
7173/* Opcode 0xf2 0x0f 0x78 - invalid */
7174
/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    /* Writes a VMCS field: the field encoding comes from the reg operand (Gy),
       the value from the r/m operand (Ey, register or memory).  Outside
       64-bit mode the operand size is forced to 32 bits. */
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            /* 32-bit variant uses the same C implementation as 64-bit. */
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc,   2);
            /* Effective address must be calculated before decoding completes. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
7251/* Opcode 0x66 0x0f 0x79 - invalid */
7252/* Opcode 0xf3 0x0f 0x79 - invalid */
7253/* Opcode 0xf2 0x0f 0x79 - invalid */
7254
7255/* Opcode 0x0f 0x7a - invalid */
7256/* Opcode 0x66 0x0f 0x7a - invalid */
7257/* Opcode 0xf3 0x0f 0x7a - invalid */
7258/* Opcode 0xf2 0x0f 0x7a - invalid */
7259
7260/* Opcode 0x0f 0x7b - invalid */
7261/* Opcode 0x66 0x0f 0x7b - invalid */
7262/* Opcode 0xf3 0x0f 0x7b - invalid */
7263/* Opcode 0xf2 0x0f 0x7b - invalid */
7264
7265/* Opcode 0x0f 0x7c - invalid */
7266
7267
7268/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7269FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7270{
7271 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7272 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7273}
7274
7275
7276/* Opcode 0xf3 0x0f 0x7c - invalid */
7277
7278
7279/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7280FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7281{
7282 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7283 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7284}
7285
7286
7287/* Opcode 0x0f 0x7d - invalid */
7288
7289
7290/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7291FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7292{
7293 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7294 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7295}
7296
7297
7298/* Opcode 0xf3 0x0f 0x7d - invalid */
7299
7300
7301/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7302FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7303{
7304 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7305 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7306}
7307
7308
/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    /* Store from MMX register to GPR/memory.  REX.W selects MOVQ (64-bit),
       otherwise MOVD (32-bit).  All forms put the FPU into MMX mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        /* NOTE(review): the @opfunction tag above names iemOp_movd_q_Pd_Ey but
           this function is iemOp_movd_q_Ey_Pd -- verify against the testcase
           generator before relying on it. */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7416
7417
/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy: store from XMM register to
 *  GPR/memory; REX.W selects MOVQ (64-bit), otherwise MOVD (32-bit). */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        /* NOTE(review): the @opfunction tag above names iemOp_movd_q_Vy_Ey but
           this function is iemOp_movd_q_Ey_Vy -- verify against the testcase
           generator before relying on it. */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7520
7521/**
7522 * @opcode 0x7e
7523 * @oppfx 0xf3
7524 * @opcpuid sse2
7525 * @opgroup og_sse2_pcksclr_datamove
7526 * @opxcpttype none
7527 * @optest op1=1 op2=2 -> op1=2
7528 * @optest op1=0 op2=-42 -> op1=-42
7529 */
7530FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7531{
7532 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7534 if (IEM_IS_MODRM_REG_MODE(bRm))
7535 {
7536 /*
7537 * XMM128, XMM64.
7538 */
7539 IEM_MC_BEGIN(0, 2);
7540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7541 IEM_MC_LOCAL(uint64_t, uSrc);
7542
7543 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7544 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7545
7546 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7547 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7548
7549 IEM_MC_ADVANCE_RIP_AND_FINISH();
7550 IEM_MC_END();
7551 }
7552 else
7553 {
7554 /*
7555 * XMM128, [mem64].
7556 */
7557 IEM_MC_BEGIN(0, 2);
7558 IEM_MC_LOCAL(uint64_t, uSrc);
7559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7560
7561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7563 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7564 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7565
7566 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7567 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7568
7569 IEM_MC_ADVANCE_RIP_AND_FINISH();
7570 IEM_MC_END();
7571 }
7572}
7573
7574/* Opcode 0xf2 0x0f 0x7e - invalid */
7575
7576
/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    /* MMX store: moves a quadword from an MMX register to an MMX register or
       memory; puts the FPU into MMX mode. */
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7624
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    /* Aligned 128-bit store: XMM to XMM/memory; the memory form uses the
       alignment-checking store. */
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7665
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    /* Unaligned 128-bit store: XMM to XMM/memory; unlike movdqa the memory
       form uses the non-alignment-checking store. */
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7706
7707/* Opcode 0xf2 0x0f 0x7f - invalid */
7708
7709
7710
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    /* Near conditional jump, taken when OF is set.  16-bit operand size uses
       a signed 16-bit displacement, otherwise a signed 32-bit one. */
    IEMOP_MNEMONIC(jo_Jv, "jo  Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7742
7743
/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    /* Near conditional jump, taken when OF is clear (branches swapped
       relative to jo). */
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7775
7776
/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    /* Near conditional jump, taken when CF is set (jc/jb/jnae). */
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7808
7809
/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    /* Near conditional jump, taken when CF is clear (jnc/jnb/jae). */
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7841
7842
/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    /* Near conditional jump, taken when ZF is set (je/jz). */
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7874
7875
/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    /* Near conditional jump, taken when ZF is clear (jne/jnz). */
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7907
7908
/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    /* Near conditional jump, taken when CF or ZF is set (jbe/jna). */
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7940
7941
/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    /* JNBE/JA rel16/32: branch taken when both CF and ZF are clear (unsigned >).
       Tests the set state and jumps in the else branch (inverse of JBE). */
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7973
7974
/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    /* JS rel16/32: branch taken when SF is set. */
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8006
8007
/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    /* JNS rel16/32: branch taken when SF is clear (jump in the else branch). */
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8039
8040
/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    /* JP/JPE rel16/32: branch taken when PF is set. */
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8072
8073
/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    /* JNP/JPO rel16/32: branch taken when PF is clear (jump in the else branch). */
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8105
8106
/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    /* JL/JNGE rel16/32: branch taken when SF != OF (signed <). */
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8138
8139
/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    /* JNL/JGE rel16/32: branch taken when SF == OF (signed >=); jump is in the
       else branch of the SF != OF test. */
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8171
8172
/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    /* JLE/JNG rel16/32: branch taken when ZF is set or SF != OF (signed <=). */
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8204
8205
/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    /* JNLE/JG rel16/32: branch taken when ZF is clear and SF == OF (signed >);
       the jump sits in the else branch of the JLE condition test. */
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8237
8238
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    /* SETO r/m8: store 1 in the byte operand when OF is set, 0 otherwise. */
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no immediate follows the ModR/M bytes */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8278
8279
/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    /* SETNO r/m8: store 1 when OF is clear, 0 when set (inverse of SETO). */
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8319
8320
/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    /* SETC/SETB/SETNAE r/m8: store 1 when CF is set, 0 otherwise. */
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8360
8361
/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    /* SETNC/SETNB/SETAE r/m8: store 1 when CF is clear, 0 when set. */
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8401
8402
/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    /* SETE/SETZ r/m8: store 1 when ZF is set, 0 otherwise. */
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8442
8443
/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    /* SETNE/SETNZ r/m8: store 1 when ZF is clear, 0 when set. */
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8483
8484
/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    /* SETBE/SETNA r/m8: store 1 when CF or ZF is set (unsigned <=), else 0. */
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8524
8525
/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    /* SETNBE/SETA r/m8: store 1 when both CF and ZF are clear (unsigned >),
       else 0 - inverse of SETBE, hence the swapped store values. */
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8565
8566
/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    /* SETS r/m8: store 1 when SF is set, 0 otherwise. */
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8606
8607
/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    /* SETNS r/m8: store 1 when SF is clear, 0 when set. */
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8647
8648
/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    /* SETP/SETPE r/m8: store 1 when PF is set, 0 otherwise. */
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8688
8689
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* SETNP/SETPO r/m8: store 1 when PF is clear, 0 when set. */
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8729
8730
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* SETL/SETNGE r/m8: store 1 when SF != OF (signed <), 0 otherwise. */
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8770
8771
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* SETNL/SETGE r/m8: store 1 when SF == OF (signed >=), 0 otherwise. */
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8811
8812
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* SETLE/SETNG r/m8: store 1 when ZF is set or SF != OF (signed <=). */
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8852
8853
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE/SETG r/m8: store 1 when ZF is clear and SF == OF (signed >),
       else 0 - inverse of SETLE, hence the swapped store values. */
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8893
8894
/**
 * Common 'push segment-register' helper.
 *
 * Fetches the selector of @a iReg (zero-extended for 32/64-bit operand sizes)
 * and pushes it using the current effective operand size.
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX index).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    /* NOTE(review): as written this asserts iReg is ES..DS whenever executing
       64-bit code, yet push fs/gs (0f a0 / 0f a8) are valid in long mode and
       route through this helper - confirm the intended assertion direction. */
    Assert(iReg < X86_SREG_FS || !IEM_IS_64BIT_CODE(pVCpu));
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode: default operand size is 64-bit, no 32-bit form. */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* SREG-specific 32-bit push variant; see the IEM_MC_PUSH_U32_SREG
               definition for the exact store quirk it implements. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8938
8939
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    /* PUSH FS - defers to the common segment-register push worker. */
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386(); /* FS was introduced with the 386. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
8948
8949
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    /* POP FS - segment loading has side effects (checks, hidden register
       update), so it is handled by the iemCImpl_pop_Sreg C implementation. */
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
8958
8959
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    /* CPUID - deferred to the C implementation; flagged IEM_CIMPL_F_VMEXIT
       since the instruction may be intercepted by a nested hypervisor. */
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_cpuid);
}
8968
8969
8970/**
8971 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8972 * iemOp_bts_Ev_Gv.
8973 */
8974#define IEMOP_BODY_BIT_Ev_Gv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
8975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8976 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8977 \
8978 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8979 { \
8980 /* register destination. */ \
8981 switch (pVCpu->iem.s.enmEffOpSize) \
8982 { \
8983 case IEMMODE_16BIT: \
8984 IEM_MC_BEGIN(3, 0); \
8985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8986 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8987 IEM_MC_ARG(uint16_t, u16Src, 1); \
8988 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8989 \
8990 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8991 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8992 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8993 IEM_MC_REF_EFLAGS(pEFlags); \
8994 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8995 \
8996 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8997 IEM_MC_END(); \
8998 break; \
8999 \
9000 case IEMMODE_32BIT: \
9001 IEM_MC_BEGIN(3, 0); \
9002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9003 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9004 IEM_MC_ARG(uint32_t, u32Src, 1); \
9005 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9006 \
9007 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9008 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9009 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9010 IEM_MC_REF_EFLAGS(pEFlags); \
9011 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9012 \
9013 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
9014 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9015 IEM_MC_END(); \
9016 break; \
9017 \
9018 case IEMMODE_64BIT: \
9019 IEM_MC_BEGIN(3, 0); \
9020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9021 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9022 IEM_MC_ARG(uint64_t, u64Src, 1); \
9023 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9024 \
9025 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9026 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9027 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9028 IEM_MC_REF_EFLAGS(pEFlags); \
9029 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9030 \
9031 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9032 IEM_MC_END(); \
9033 break; \
9034 \
9035 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9036 } \
9037 } \
9038 else \
9039 { \
9040 /* memory destination. */ \
9041 /** @todo test negative bit offsets! */ \
9042 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9043 { \
9044 switch (pVCpu->iem.s.enmEffOpSize) \
9045 { \
9046 case IEMMODE_16BIT: \
9047 IEM_MC_BEGIN(3, 2); \
9048 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9049 IEM_MC_ARG(uint16_t, u16Src, 1); \
9050 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9052 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9053 \
9054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9055 IEMOP_HLP_DONE_DECODING(); \
9056 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9057 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9058 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9059 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9060 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9061 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9062 IEM_MC_FETCH_EFLAGS(EFlags); \
9063 \
9064 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9065 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9066 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
9067 \
9068 IEM_MC_COMMIT_EFLAGS(EFlags); \
9069 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9070 IEM_MC_END(); \
9071 break; \
9072 \
9073 case IEMMODE_32BIT: \
9074 IEM_MC_BEGIN(3, 2); \
9075 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9076 IEM_MC_ARG(uint32_t, u32Src, 1); \
9077 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9079 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9080 \
9081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9082 IEMOP_HLP_DONE_DECODING(); \
9083 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9084 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9085 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9086 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9087 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9088 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9089 IEM_MC_FETCH_EFLAGS(EFlags); \
9090 \
9091 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9092 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9093 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
9094 \
9095 IEM_MC_COMMIT_EFLAGS(EFlags); \
9096 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9097 IEM_MC_END(); \
9098 break; \
9099 \
9100 case IEMMODE_64BIT: \
9101 IEM_MC_BEGIN(3, 2); \
9102 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9103 IEM_MC_ARG(uint64_t, u64Src, 1); \
9104 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9106 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9107 \
9108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9109 IEMOP_HLP_DONE_DECODING(); \
9110 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9111 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9112 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9113 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9114 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9115 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9116 IEM_MC_FETCH_EFLAGS(EFlags); \
9117 \
9118 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9119 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9120 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
9121 \
9122 IEM_MC_COMMIT_EFLAGS(EFlags); \
9123 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9124 IEM_MC_END(); \
9125 break; \
9126 \
9127 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9128 } \
9129 } \
9130 else \
9131 { \
9132 (void)0
9133
/**
 * Tail for IEMOP_BODY_BIT_Ev_Gv used by instructions that do not allow a
 * LOCK prefix (BT): raises \#UD for the locked memory form and closes the
 * braces left open by IEMOP_BODY_BIT_Ev_Gv.
 */
#define IEMOP_BODY_BIT_Ev_Gv_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
9140
/**
 * Tail for IEMOP_BODY_BIT_Ev_Gv used by BTS/BTR/BTC: implements the LOCK
 * prefixed memory-destination form using the locked assembly workers (the
 * mapping is always IEM_ACCESS_DATA_RW here) and closes the braces left open
 * by IEMOP_BODY_BIT_Ev_Gv.  The bit-offset/effective-address adjustment
 * mirrors the unlocked memory path in IEMOP_BODY_BIT_Ev_Gv.
 */
#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,              pu16Dst,                0); \
                    IEM_MC_ARG(uint16_t,                u16Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_LOCAL(int16_t,               i16AddrAdj); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); /* in-word bit position */ \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); /* signed word index */ \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); /* scale to bytes */ \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,              pu32Dst,                0); \
                    IEM_MC_ARG(uint32_t,                u32Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_LOCAL(int32_t,               i32AddrAdj); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); /* in-dword bit position */ \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); /* signed dword index */ \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); /* scale to bytes */ \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,              pu64Dst,                0); \
                    IEM_MC_ARG(uint64_t,                u64Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_LOCAL(int64_t,               i64AddrAdj); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); /* in-qword bit position */ \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); /* signed qword index */ \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); /* scale to bytes */ \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
9230
9231
/** Opcode 0x0f 0xa3 - bt Ev,Gv. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    /* BT only reads the destination (IEM_ACCESS_DATA_R), so the LOCK prefix
       is rejected via the NO_LOCK tail. */
    IEMOP_BODY_BIT_Ev_Gv(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BIT_Ev_Gv_NO_LOCK();
}
9240
9241
9242/**
9243 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9244 */
9245FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9246{
9247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9248 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9249
9250 if (IEM_IS_MODRM_REG_MODE(bRm))
9251 {
9252 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9253
9254 switch (pVCpu->iem.s.enmEffOpSize)
9255 {
9256 case IEMMODE_16BIT:
9257 IEM_MC_BEGIN(4, 0);
9258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9259 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9260 IEM_MC_ARG(uint16_t, u16Src, 1);
9261 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9262 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9263
9264 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9265 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9266 IEM_MC_REF_EFLAGS(pEFlags);
9267 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9268
9269 IEM_MC_ADVANCE_RIP_AND_FINISH();
9270 IEM_MC_END();
9271 break;
9272
9273 case IEMMODE_32BIT:
9274 IEM_MC_BEGIN(4, 0);
9275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9276 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9277 IEM_MC_ARG(uint32_t, u32Src, 1);
9278 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9279 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9280
9281 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9282 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9283 IEM_MC_REF_EFLAGS(pEFlags);
9284 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9285
9286 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9287 IEM_MC_ADVANCE_RIP_AND_FINISH();
9288 IEM_MC_END();
9289 break;
9290
9291 case IEMMODE_64BIT:
9292 IEM_MC_BEGIN(4, 0);
9293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9294 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9295 IEM_MC_ARG(uint64_t, u64Src, 1);
9296 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9297 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9298
9299 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9300 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9301 IEM_MC_REF_EFLAGS(pEFlags);
9302 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9303
9304 IEM_MC_ADVANCE_RIP_AND_FINISH();
9305 IEM_MC_END();
9306 break;
9307
9308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9309 }
9310 }
9311 else
9312 {
9313 switch (pVCpu->iem.s.enmEffOpSize)
9314 {
9315 case IEMMODE_16BIT:
9316 IEM_MC_BEGIN(4, 2);
9317 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9318 IEM_MC_ARG(uint16_t, u16Src, 1);
9319 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9320 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9322
9323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9324 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9325 IEM_MC_ASSIGN(cShiftArg, cShift);
9326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9327 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9328 IEM_MC_FETCH_EFLAGS(EFlags);
9329 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9330 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9331
9332 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9333 IEM_MC_COMMIT_EFLAGS(EFlags);
9334 IEM_MC_ADVANCE_RIP_AND_FINISH();
9335 IEM_MC_END();
9336 break;
9337
9338 case IEMMODE_32BIT:
9339 IEM_MC_BEGIN(4, 2);
9340 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9341 IEM_MC_ARG(uint32_t, u32Src, 1);
9342 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9343 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9345
9346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9347 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9348 IEM_MC_ASSIGN(cShiftArg, cShift);
9349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9350 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9351 IEM_MC_FETCH_EFLAGS(EFlags);
9352 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9353 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9354
9355 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9356 IEM_MC_COMMIT_EFLAGS(EFlags);
9357 IEM_MC_ADVANCE_RIP_AND_FINISH();
9358 IEM_MC_END();
9359 break;
9360
9361 case IEMMODE_64BIT:
9362 IEM_MC_BEGIN(4, 2);
9363 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9364 IEM_MC_ARG(uint64_t, u64Src, 1);
9365 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9366 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9368
9369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9370 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9371 IEM_MC_ASSIGN(cShiftArg, cShift);
9372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9373 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9374 IEM_MC_FETCH_EFLAGS(EFlags);
9375 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9376 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9377
9378 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9379 IEM_MC_COMMIT_EFLAGS(EFlags);
9380 IEM_MC_ADVANCE_RIP_AND_FINISH();
9381 IEM_MC_END();
9382 break;
9383
9384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9385 }
9386 }
9387}
9388
9389
9390/**
9391 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9392 */
9393FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9394{
9395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9396 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9397
9398 if (IEM_IS_MODRM_REG_MODE(bRm))
9399 {
9400 switch (pVCpu->iem.s.enmEffOpSize)
9401 {
9402 case IEMMODE_16BIT:
9403 IEM_MC_BEGIN(4, 0);
9404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9405 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9406 IEM_MC_ARG(uint16_t, u16Src, 1);
9407 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9408 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9409
9410 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9411 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9412 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9413 IEM_MC_REF_EFLAGS(pEFlags);
9414 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9415
9416 IEM_MC_ADVANCE_RIP_AND_FINISH();
9417 IEM_MC_END();
9418 break;
9419
9420 case IEMMODE_32BIT:
9421 IEM_MC_BEGIN(4, 0);
9422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9423 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9424 IEM_MC_ARG(uint32_t, u32Src, 1);
9425 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9426 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9427
9428 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9429 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9430 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9431 IEM_MC_REF_EFLAGS(pEFlags);
9432 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9433
9434 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9435 IEM_MC_ADVANCE_RIP_AND_FINISH();
9436 IEM_MC_END();
9437 break;
9438
9439 case IEMMODE_64BIT:
9440 IEM_MC_BEGIN(4, 0);
9441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9442 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9443 IEM_MC_ARG(uint64_t, u64Src, 1);
9444 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9445 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9446
9447 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9448 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9449 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9450 IEM_MC_REF_EFLAGS(pEFlags);
9451 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9452
9453 IEM_MC_ADVANCE_RIP_AND_FINISH();
9454 IEM_MC_END();
9455 break;
9456
9457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9458 }
9459 }
9460 else
9461 {
9462 switch (pVCpu->iem.s.enmEffOpSize)
9463 {
9464 case IEMMODE_16BIT:
9465 IEM_MC_BEGIN(4, 2);
9466 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9467 IEM_MC_ARG(uint16_t, u16Src, 1);
9468 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9469 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9471
9472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9474 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9475 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9476 IEM_MC_FETCH_EFLAGS(EFlags);
9477 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9478 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9479
9480 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9481 IEM_MC_COMMIT_EFLAGS(EFlags);
9482 IEM_MC_ADVANCE_RIP_AND_FINISH();
9483 IEM_MC_END();
9484 break;
9485
9486 case IEMMODE_32BIT:
9487 IEM_MC_BEGIN(4, 2);
9488 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9489 IEM_MC_ARG(uint32_t, u32Src, 1);
9490 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9491 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9493
9494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9496 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9497 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9498 IEM_MC_FETCH_EFLAGS(EFlags);
9499 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9500 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9501
9502 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9503 IEM_MC_COMMIT_EFLAGS(EFlags);
9504 IEM_MC_ADVANCE_RIP_AND_FINISH();
9505 IEM_MC_END();
9506 break;
9507
9508 case IEMMODE_64BIT:
9509 IEM_MC_BEGIN(4, 2);
9510 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9511 IEM_MC_ARG(uint64_t, u64Src, 1);
9512 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9513 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9515
9516 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9518 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9519 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9520 IEM_MC_FETCH_EFLAGS(EFlags);
9521 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9522 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9523
9524 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9525 IEM_MC_COMMIT_EFLAGS(EFlags);
9526 IEM_MC_ADVANCE_RIP_AND_FINISH();
9527 IEM_MC_END();
9528 break;
9529
9530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9531 }
9532 }
9533}
9534
9535
9536
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib (immediate shift count). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    /* Worker table selected by guest-CPU EFLAGS behaviour (Intel vs AMD). */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9544
9545
/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL (shift count in CL). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    /* Worker table selected by guest-CPU EFLAGS behaviour (Intel vs AMD). */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9553
9554
/** Opcode 0x0f 0xa8 - push gs. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
9563
9564
/** Opcode 0x0f 0xa9 - pop gs. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defers to the common segment-pop C implementation (see iemOp_pop_fs). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
9573
9574
/** Opcode 0x0f 0xaa - rsm (resume from system management mode). */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* RSM restores the whole pre-SMM context: it is a far indirect branch
       that can change CPU mode and RFLAGS, may cause a VM-exit, and ends the
       current translation block - hence the pile of CIMPL flags. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_rsm);
}
9585
9586
9587
/** Opcode 0x0f 0xab - bts Ev,Gv. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    /* BTS writes the destination (RW access) and supports LOCK, so the
       locked worker tail is used instead of the NO_LOCK one. */
    IEMOP_BODY_BIT_Ev_Gv(       iemAImpl_bts_u16,        iemAImpl_bts_u32,        iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
9596
9597
/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib (immediate shift count). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    /* Worker table selected by guest-CPU EFLAGS behaviour (Intel vs AMD). */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9605
9606
/** Opcode 0x0f 0xad - shrd Ev,Gv,CL (shift count in CL). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    /* Worker table selected by guest-CPU EFLAGS behaviour (Intel vs AMD). */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9614
9615
/** Opcode 0x0f 0xae mem/0 - fxsave m512.
 * Raises \#UD when the guest CPU profile lacks FXSR; otherwise defers to the
 * C implementation after actualizing the FPU state for reading. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); /* fxsave only reads the FPU state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9634
9635
/** Opcode 0x0f 0xae mem/1 - fxrstor m512.
 * Raises \#UD when the guest CPU profile lacks FXSR; otherwise defers to the
 * C implementation after actualizing the FPU state for modification. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* fxrstor overwrites the FPU state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9654
9655
9656/**
9657 * @opmaps grp15
9658 * @opcode !11/2
9659 * @oppfx none
9660 * @opcpuid sse
9661 * @opgroup og_sse_mxcsrsm
9662 * @opxcpttype 5
9663 * @optest op1=0 -> mxcsr=0
9664 * @optest op1=0x2083 -> mxcsr=0x2083
9665 * @optest op1=0xfffffffe -> value.xcpt=0xd
9666 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9667 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9668 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9669 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9670 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9671 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9672 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9673 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9674 */
9675FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9676{
9677 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9678 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9679 IEMOP_RAISE_INVALID_OPCODE_RET();
9680
9681 IEM_MC_BEGIN(2, 0);
9682 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9683 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9686 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9687 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9688 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9689 IEM_MC_END();
9690}
9691
9692
9693/**
9694 * @opmaps grp15
9695 * @opcode !11/3
9696 * @oppfx none
9697 * @opcpuid sse
9698 * @opgroup og_sse_mxcsrsm
9699 * @opxcpttype 5
9700 * @optest mxcsr=0 -> op1=0
9701 * @optest mxcsr=0x2083 -> op1=0x2083
9702 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9703 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9704 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9705 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9706 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9707 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9708 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9709 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9710 */
9711FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9712{
9713 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9714 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9715 IEMOP_RAISE_INVALID_OPCODE_RET();
9716
9717 IEM_MC_BEGIN(2, 0);
9718 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9719 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9722 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9723 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9724 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9725 IEM_MC_END();
9726}
9727
9728
9729/**
9730 * @opmaps grp15
9731 * @opcode !11/4
9732 * @oppfx none
9733 * @opcpuid xsave
9734 * @opgroup og_system
9735 * @opxcpttype none
9736 */
9737FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9738{
9739 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9740 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9741 IEMOP_RAISE_INVALID_OPCODE_RET();
9742
9743 IEM_MC_BEGIN(3, 0);
9744 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9745 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9746 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9749 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9750 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9751 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9752 IEM_MC_END();
9753}
9754
9755
9756/**
9757 * @opmaps grp15
9758 * @opcode !11/5
9759 * @oppfx none
9760 * @opcpuid xsave
9761 * @opgroup og_system
9762 * @opxcpttype none
9763 */
9764FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9765{
9766 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9767 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9768 IEMOP_RAISE_INVALID_OPCODE_RET();
9769
9770 IEM_MC_BEGIN(3, 0);
9771 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9772 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9773 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9776 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9777 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9778 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9779 IEM_MC_END();
9780}
9781
/** Opcode 0x0f 0xae mem/6 - xsaveopt; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9784
9785/**
9786 * @opmaps grp15
9787 * @opcode !11/7
9788 * @oppfx none
9789 * @opcpuid clfsh
9790 * @opgroup og_cachectl
9791 * @optest op1=1 ->
9792 */
9793FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9794{
9795 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9796 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9797 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9798
9799 IEM_MC_BEGIN(2, 0);
9800 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9801 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9804 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9805 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9806 IEM_MC_END();
9807}
9808
9809/**
9810 * @opmaps grp15
9811 * @opcode !11/7
9812 * @oppfx 0x66
9813 * @opcpuid clflushopt
9814 * @opgroup og_cachectl
9815 * @optest op1=1 ->
9816 */
9817FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9818{
9819 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9820 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9821 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9822
9823 IEM_MC_BEGIN(2, 0);
9824 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9825 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9828 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9829 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9830 IEM_MC_END();
9831}
9832
9833
/** Opcode 0x0f 0xae 11b/5 - lfence.
 * On non-ARM hosts without SSE2 an alternative memory-fence worker based on
 * other serializing operations is used instead of the real lfence. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* guest needs SSE2 for lfence */
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9852
9853
/** Opcode 0x0f 0xae 11b/6 - mfence.
 * On non-ARM hosts without SSE2 an alternative memory-fence worker is used
 * instead of the real mfence. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* guest needs SSE2 for mfence */
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9872
9873
/** Opcode 0x0f 0xae 11b/7 - sfence.
 * On non-ARM hosts without SSE2 an alternative memory-fence worker is used
 * instead of the real sfence. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* guest needs SSE2 for sfence */
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9892
9893
/** Opcode 0xf3 0x0f 0xae 11b/0 - RDFSBASE Ry (read FS segment base into GPR). */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: full FS base into a 64-bit register. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: low 32 bits of the FS base. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9921
9922
/** Opcode 0xf3 0x0f 0xae 11b/1 - RDGSBASE Ry (read GS segment base into GPR). */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: full GS base into a 64-bit register. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: low 32 bits of the GS base. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9950
9951
/** Opcode 0xf3 0x0f 0xae 11b/2 - WRFSBASE Ry (write GPR to FS segment base). */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit source: must be canonical or we raise #GP(0). */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit source: stored zero extended, always canonical so no check. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9980
9981
/** Opcode 0xf3 0x0f 0xae 11b/3 - WRGSBASE Ry (write GPR to GS segment base). */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit source: must be canonical or we raise #GP(0). */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit source: stored zero extended, always canonical so no check. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10010
10011
/**
 * Group 15 jump table for the register variant (mod=11b).
 *
 * Indexed by (reg << 2) + prefix, where prefix is the mandatory-prefix index
 * (0=none, 1=0x66, 2=0xf3, 3=0xf2) — see the column header below.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdfsbase,           iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdgsbase,           iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrfsbase,           iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrgsbase,           iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10027
10028
/**
 * Group 15 jump table for the memory variant (mod!=11b).
 *
 * Indexed by (reg << 2) + prefix, where prefix is the mandatory-prefix index
 * (0=none, 1=0x66, 2=0xf3, 3=0xf2) — see the column header below.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_Grp15_fxsave,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,             iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt,          iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,           iemOp_Grp15_clflushopt,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10044
10045
/** Opcode 0x0f 0xae - group 15 dispatcher (fences, FXSAVE/XSAVE family, etc.).
 * Picks the handler from the reg/mem jump tables above using the ModR/M reg
 * field and the active mandatory prefix. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                              + pVCpu->iem.s.idxPrefix], bRm);
}
10059
10060
/** Opcode 0x0f 0xaf - IMUL Gv,Ev (two operand signed multiply).
 * Uses the shared read-modify body with the EFLAGS-behavior-selected
 * implementation table; SF/ZF/AF/PF are architecturally undefined. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
10070
10071
/** Opcode 0x0f 0xb0 - CMPXCHG Eb,Gb.
 * Compares AL with the destination; on equality stores Gb into the
 * destination, otherwise loads the destination into AL. ZF reports success. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operate directly on the guest registers. */
        IEM_MC_BEGIN(4, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it R/W and work on a local AL copy,
           committing AL back afterwards. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10129
/** Opcode 0x0f 0xb1 - CMPXCHG Ev,Gv.
 * Word/dword/qword compare-and-exchange against rAX; ZF reports success.
 * Note the 32-bit variants must zero the high halves of whichever 64-bit
 * register actually gets written. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* Only the register that was actually written gets its high
                   32 bits cleared: the destination on success (ZF=1), EAX on
                   failure (ZF=0). */
                IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                } IEM_MC_ELSE() {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts pass the 64-bit source by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map it R/W, use a local rAX copy and commit it
           back after the operation. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);

                /* EAX is only written back on failure (ZF=0), which also
                   zero-extends it into RAX. */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                    IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts pass the 64-bit source by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10333
10334
/**
 * Common worker for LSS/LFS/LGS: loads a far pointer (offset + selector) from
 * memory into the given segment register and general register.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_SS/FS/GS).
 * @param   bRm         The ModR/M byte (memory form only).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit offset followed by the 16-bit selector at +2. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            /* Loading SS/DS/ES in 32-bit code may change the execution mode. */
            if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
                IEM_MC_CALL_CIMPL_5( 0, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            else
                IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_MODE, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();

        case IEMMODE_32BIT:
            /* 32-bit offset followed by the 16-bit selector at +4. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            /* Loading SS/DS/ES in 32-bit code may change the execution mode. */
            if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
                IEM_MC_CALL_CIMPL_5( 0, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            else
                IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_MODE, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();

        case IEMMODE_64BIT:
            /* 64-bit offset followed by the 16-bit selector at +8. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(0, iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10399
10400
/** Opcode 0x0f 0xb2 - LSS Gv,Mp (load far pointer into SS:Gv).
 * Register form is invalid; memory form defers to the common worker. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
10411
10412
/** Opcode 0x0f 0xb3 - BTR Ev,Gv (bit test and reset).
 * Expands the common bit-op bodies for the plain and LOCKed variants. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv(       iemAImpl_btr_u16,        iemAImpl_btr_u32,        iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
10421
10422
/** Opcode 0x0f 0xb4 - LFS Gv,Mp (load far pointer into FS:Gv).
 * Register form is invalid; memory form defers to the common worker. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
10433
10434
/** Opcode 0x0f 0xb5 - LGS Gv,Mp (load far pointer into GS:Gv).
 * Register form is invalid; memory form defers to the common worker. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
10445
10446
/** Opcode 0x0f 0xb6 - MOVZX Gv,Eb (zero-extend byte to word/dword/qword). */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10542
10543
/** Opcode 0x0f 0xb7 - MOVZX Gv,Ew (zero-extend word to dword/qword).
 * With 16-bit operand size this degenerates to a plain 16-bit move, which the
 * 32-bit path also covers by writing the low 16 bits back. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
10612
10613
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF/Itanium); decodes as \#UD here. */
FNIEMOP_UD_STUB(iemOp_jmpe);
10616
10617
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev (population count).
 * Invalid unless the guest CPU advertises POPCNT; selects a native host
 * implementation when available, otherwise a portable fallback. */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    {   NULL, NULL,  iemAImpl_popcnt_u16, NULL,   iemAImpl_popcnt_u32, NULL,   iemAImpl_popcnt_u64, NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    {   NULL, NULL,  iemAImpl_popcnt_u16_fallback, NULL,  iemAImpl_popcnt_u32_fallback, NULL,  iemAImpl_popcnt_u64_fallback, NULL };
#endif
    const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
10635
10636
/**
 * @opcode      0xb9
 * @opinvalid   intel-modrm
 * @optest      ->
 *
 * Group 10 / UD1 — always raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
     * too. See bs3-cpu-decoder-1.c32.  So, we can forward to iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}
10652
10653
/**
 * Body for the group 8 bit instructions (BT/BTS/BTR/BTC Ev,Ib).
 *
 * Handles both the register and the non-LOCKed memory destination forms; the
 * bit offset immediate is masked to the operand width (0x0f/0x1f/0x3f).
 * OF/SF/ZF/AF/PF are architecturally undefined.  The macro deliberately
 * leaves an open brace for the LOCKed memory path, which must be closed by
 * IEMOP_BODY_BIT_Ev_Ib_NO_LOCK or IEMOP_BODY_BIT_Ev_Ib_LOCKED.
 */
#define IEMOP_BODY_BIT_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ bImm & 0x0f, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ bImm & 0x1f, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ bImm & 0x3f, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint16_t *,              pu16Dst,               0); \
                    IEM_MC_ARG(uint16_t,                u16Src,                1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,       2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint32_t *,              pu32Dst,               0); \
                    IEM_MC_ARG(uint32_t,                u32Src,                1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,       2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint64_t *,              pu64Dst,               0); \
                    IEM_MC_ARG(uint64_t,                u64Src,                1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,       2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
10793
/**
 * Epilogue for IEMOP_BODY_BIT_Ev_Ib used by BT (Grp8 /4), which has no
 * LOCK'able form: if a LOCK prefix was decoded on the memory variant we end
 * up here and raise \#UD via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET().
 *
 * NOTE(review): the two closing braces below intentionally close scopes
 * opened inside IEMOP_BODY_BIT_Ev_Ib (memory-destination else + LOCK else);
 * this macro is only valid immediately after that one.
 */
#define IEMOP_BODY_BIT_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
10800
/**
 * Epilogue for IEMOP_BODY_BIT_Ev_Ib used by the LOCK'able Grp8 instructions
 * (BTS/BTR/BTC with imm8): emits the locked memory-destination paths for all
 * three effective operand sizes, invoking the given *_locked assembly
 * workers.  The bit offset immediate is masked to the operand width
 * (0x0f/0x1f/0x3f) before use.
 *
 * NOTE(review): the trailing unmatched closing braces pair with scopes opened
 * inside IEMOP_BODY_BIT_Ev_Ib; this macro must directly follow that one.
 *
 * @param a_fnLockedU16 Locked 16-bit worker (e.g. iemAImpl_bts_u16_locked).
 * @param a_fnLockedU32 Locked 32-bit worker.
 * @param a_fnLockedU64 Locked 64-bit worker.
 */
#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
10872
10873
/** Opcode 0x0f 0xba /4.
 * BT Ev,Ib - bit test with immediate bit offset.  Read-only access
 * (IEM_ACCESS_DATA_R) since BT never writes its destination; the LOCK prefix
 * is therefore invalid and handled by the _NO_LOCK epilogue. */
FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
{
    IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BIT_Ev_Ib_NO_LOCK();
}
10881
10882
/** Opcode 0x0f 0xba /5.
 * BTS Ev,Ib - bit test and set.  Read-write destination; the _LOCKED
 * epilogue supplies the atomic workers for a LOCK-prefixed memory form. */
FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
{
    IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
10890
10891
/** Opcode 0x0f 0xba /6.
 * BTR Ev,Ib - bit test and reset.  Read-write destination with locked
 * workers for the LOCK-prefixed memory form. */
FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
{
    IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
10899
10900
/** Opcode 0x0f 0xba /7.
 * BTC Ev,Ib - bit test and complement.  Read-write destination with locked
 * workers for the LOCK-prefixed memory form. */
FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
{
    IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}
10908
10909
/** Opcode 0x0f 0xba.
 * Group 8: dispatches on the ModR/M reg field to BT/BTS/BTR/BTC with an imm8
 * bit offset; /0../3 are undefined and decode as invalid (still consuming the
 * full ModR/M operand and the imm8, matching both Intel and AMD). */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386(); /* 0F BA group requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);

        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10929
10930
/** Opcode 0x0f 0xbb.
 * BTC Ev,Gv - bit test and complement with register bit offset; read-write
 * destination, with locked workers for the LOCK-prefixed memory form. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}
10939
10940
10941/**
10942 * Common worker for BSF and BSR instructions.
10943 *
10944 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10945 * the destination register, which means that for 32-bit operations the high
10946 * bits must be left alone.
10947 *
10948 * @param pImpl Pointer to the instruction implementation (assembly).
10949 */
FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* Only zero-extend the 32-bit destination into the full 64-bit
                   register when ZF is clear, i.e. when the worker actually
                   wrote the destination (see function comment above: the
                   destination is left alone when the source is zero). */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                } IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* Same conditional zero-extension as the register form above. */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                } IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11082
11083
/** Opcode 0x0f 0xbc.
 * BSF Gv,Ev - bit scan forward; delegates to the common bit-scan worker with
 * the target-CPU-specific BSF EFLAGS-behavior implementation table.  All
 * arithmetic flags except ZF are declared undefined for verification. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
11092
11093
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev
 * Trailing-zero count (BMI1).  When the guest CPU profile lacks BMI1 the
 * F3 prefix is ignored and the opcode decodes as plain BSF. */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Implementation tables in IEMOPBINSIZES slot order; only the "normal"
       (non-locked) 16/32/64-bit slots are populated.  The 2x4 selection
       matrix is indexed via IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX with the
       host's BMI1 capability (see below) and the target-CPU EFLAGS
       behavior; presumably the native variant is only usable when the host
       itself has TZCNT -- verify against the selector macro. */
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
11119
11120
/** Opcode 0x0f 0xbd.
 * BSR Gv,Ev - bit scan reverse; delegates to the common bit-scan worker with
 * the target-CPU-specific BSR EFLAGS-behavior implementation table. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
11129
11130
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev
 * Leading-zero count (advertised via ABM/BMI1 profile flag here).  When the
 * guest CPU profile lacks it, the F3 prefix is ignored and the opcode
 * decodes as plain BSR.  Structure mirrors iemOp_tzcnt_Gv_Ev. */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Implementation tables; see the tzcnt sibling for the selection-matrix
       layout notes. */
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
11156
11157
11158
/** Opcode 0x0f 0xbe.
 * MOVSX Gv,Eb - move byte to word/dword/qword with sign extension.  The
 * fetch macros (IEM_MC_FETCH_*_SX_*) perform the sign extension; the store
 * then writes the widened value to the destination register. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11254
11255
/** Opcode 0x0f 0xbf.
 * MOVSX Gv,Ew - move word to dword/qword with sign extension.  Only two
 * destination widths are emitted: 32-bit for any non-64-bit effective
 * operand size, otherwise 64-bit. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
11324
11325
/** Opcode 0x0f 0xc0.
 * XADD Eb,Gb - exchange and add, byte form (486+).  The worker writes the
 * sum to the destination and the old destination value to the register
 * operand; for the memory form the register is copied to a local first and
 * written back only after the memory commit succeeds. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix was decoded. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11382
11383
/** Opcode 0x0f 0xc1.
 * XADD Ev,Gv - exchange and add for 16/32/64-bit operands (486+).  Same
 * structure as the byte form: register destination calls the plain worker;
 * memory destination maps the location RW, operates on a local copy of the
 * register operand, picks a locked worker under a LOCK prefix, and writes
 * the old destination value back to the register after the commit. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* Both operands were written, so zero-extend both into the
                   full 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                /* Pick the locked worker when a LOCK prefix was decoded. */
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11540
11541
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib
 * Packed single-precision compare with imm8 predicate.  The result is only
 * stored to the destination XMM register when no MXCSR exception is pending
 * after the worker runs; otherwise a SIMD-FP/\#UD exception is raised. */
FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* One extra opcode byte (the imm8) follows the effective address. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11611
11612
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib
 * Packed double-precision compare with imm8 predicate (SSE2).  Mirrors the
 * cmpps decoder above, differing only in the feature gate (fSse2) and the
 * assembly worker. */
FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* One extra opcode byte (the imm8) follows the effective address. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11682
11683
11684/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11685FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11686{
11687 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11688
11689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11690 if (IEM_IS_MODRM_REG_MODE(bRm))
11691 {
11692 /*
11693 * XMM32, XMM32.
11694 */
11695 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11696 IEM_MC_BEGIN(4, 2);
11697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11698 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11699 IEM_MC_LOCAL(X86XMMREG, Dst);
11700 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11701 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11702 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11703 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11704 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11705 IEM_MC_PREPARE_SSE_USAGE();
11706 IEM_MC_REF_MXCSR(pfMxcsr);
11707 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11708 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11709 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11710 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11711 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11712 } IEM_MC_ELSE() {
11713 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11714 } IEM_MC_ENDIF();
11715
11716 IEM_MC_ADVANCE_RIP_AND_FINISH();
11717 IEM_MC_END();
11718 }
11719 else
11720 {
11721 /*
11722 * XMM32, [mem32].
11723 */
11724 IEM_MC_BEGIN(4, 3);
11725 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11726 IEM_MC_LOCAL(X86XMMREG, Dst);
11727 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11728 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11729 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11731
11732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11733 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11734 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11736 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11737 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11738
11739 IEM_MC_PREPARE_SSE_USAGE();
11740 IEM_MC_REF_MXCSR(pfMxcsr);
11741 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11742 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11743 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11744 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11745 } IEM_MC_ELSE() {
11746 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11747 } IEM_MC_ENDIF();
11748
11749 IEM_MC_ADVANCE_RIP_AND_FINISH();
11750 IEM_MC_END();
11751 }
11752}
11753
11754
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib
 *
 * Compares the low double-precision value of Wsd (XMM or mem64) against the
 * low double-precision value of Vsd, writing an all-ones/all-zeroes mask
 * selected by the imm8 predicate to the low qword of the destination.  The
 * result is only committed when no MXCSR exception is pending. */
FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        /* Only the low qword of the destination is written, and only when no
           MXCSR exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The imm8 follows the ModR/M displacement, hence the 1 byte hint here
           and the immediate fetch after the effective address calculation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11824
11825
/** Opcode 0x0f 0xc3.
 *
 * MOVNTI - non-temporal store of a 32/64-bit general register to memory.
 * Only the register -> memory encoding is valid; everything else raises
 * \#UD, as does the 16-bit operand size. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* MOVNTI is an SSE2 instruction. */

                /* Note: the non-temporal hint is not modelled; this is a plain store. */
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                IEMOP_RAISE_INVALID_OPCODE_RET();

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
11876
11877
11878/* Opcode 0x66 0x0f 0xc3 - invalid */
11879/* Opcode 0xf3 0x0f 0xc3 - invalid */
11880/* Opcode 0xf2 0x0f 0xc3 - invalid */
11881
11882
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib
 *
 * MMX form of PINSRW: inserts a word from a general register or memory into
 * the MMX register word slot selected by imm8.  Available with either SSE or
 * the AMD MMX extensions (hence the _2_OR decode gate). */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The imm8 follows the ModR/M displacement, hence the 1 byte hint. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11935
11936
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib
 *
 * SSE2 form of PINSRW: inserts a word from a general register or memory into
 * the XMM register word slot selected by imm8. */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The imm8 follows the ModR/M displacement, hence the 1 byte hint. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11985
11986
11987/* Opcode 0xf3 0x0f 0xc4 - invalid */
11988/* Opcode 0xf2 0x0f 0xc4 - invalid */
11989
11990
/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib
 *
 * MMX form of PEXTRW: extracts the MMX register word selected by imm8 and
 * stores it zero-extended in a 32-bit general register.  Register operand
 * only; the memory encoding raises \#UD. */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, MMX, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
        /* The 16-bit result is stored zero-extended into the 32-bit greg. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12021
12022
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib
 *
 * SSE2 form of PEXTRW: extracts the XMM register word selected by imm8 and
 * stores it zero-extended in a 32-bit general register.  Register operand
 * only; the memory encoding raises \#UD. */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
        /* The 16-bit result is stored zero-extended into the 32-bit greg. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12052
12053
12054/* Opcode 0xf3 0x0f 0xc5 - invalid */
12055/* Opcode 0xf2 0x0f 0xc5 - invalid */
12056
12057
/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib
 *
 * Shuffles packed single-precision values from Vps and Wps (XMM or mem128)
 * into the destination register as selected by the imm8 control byte. */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The imm8 follows the ModR/M displacement, hence the 1 byte hint. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12108
12109
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib
 *
 * Shuffles packed double-precision values from Vpd and Wpd (XMM or mem128)
 * into the destination register as selected by the imm8 control byte. */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The imm8 follows the ModR/M displacement, hence the 1 byte hint. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12160
12161
12162/* Opcode 0xf3 0x0f 0xc6 - invalid */
12163/* Opcode 0xf2 0x0f 0xc6 - invalid */
12164
12165
/** Opcode 0x0f 0xc7 !11/1.
 *
 * CMPXCHG8B - compares EDX:EAX against the 64-bit memory operand, storing
 * ECX:EBX on match (ZF=1); otherwise EDX:EAX is loaded with the memory value.
 * Honours LOCK by selecting the locked assembly worker. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Gather the EDX:EAX comparand and the ECX:EBX replacement value. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    /* Use the locked worker when a LOCK prefix is present and not disregarded. */
    if (   !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
        && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the memory value is loaded into EDX:EAX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12209
12210
/** Opcode REX.W 0x0f 0xc7 !11/1.
 *
 * CMPXCHG16B - compares RDX:RAX against the 16-byte memory operand, storing
 * RCX:RBX on match (ZF=1); otherwise RDX:RAX is loaded with the memory value.
 * Requires the CX16 CPUID feature (else \#UD) and a 16-byte aligned operand
 * (else \#GP(0)).  Host support decides between native and fallback workers. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        /* 16-byte alignment is architecturally required, #GP(0) otherwise. */
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Gather the RDX:RAX comparand and the RCX:RBX replacement value. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);

#ifdef RT_ARCH_AMD64 /* some code duplication here because IEMAllInstPython.py cannot parse if/else/#if spaghetti. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (   !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
                && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
        { /* (see comments in #else case below) */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
                                    iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }

#elif defined(RT_ARCH_ARM64)
        /** @todo may require fallback for unaligned accesses... */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);

#else
        /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                 accesses and not all all atomic, which works fine on in UNI CPU guest
                 configuration (ignoring DMA). If guest SMP is active we have no choice
                 but to use a rendezvous callback here. Sigh. */
        if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        else
        {
            IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
        }
#endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* On mismatch (ZF clear) the memory value is loaded into RDX:RAX. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();

        IEM_MC_END();
    }
    Log(("cmpxchg16b -> #UD\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
12294
12295FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12296{
12297 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12298 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12299 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12300}
12301
12302
/** Opcode 0x0f 0xc7 11/6.
 *
 * RDRAND - stores a hardware random number in the selected general register,
 * with CF indicating success.  \#UD unless the guest CPU advertises RDRAND,
 * and register encodings only. */
FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                /* Use the host RDRAND instruction when available, else a software fallback. */
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
                                         pu16Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
                                         pu32Dst, pEFlags);

                /* 32-bit writes clear the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
                                         pu64Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12367
/** Opcode 0x0f 0xc7 !11/6.
 *
 * VMPTRLD - loads the current VMCS pointer from the 64-bit memory operand.
 * Decodes the effective address and defers all the heavy lifting (checks,
 * VM-exit handling) to iemCImpl_vmptrld.  \#UD stub when nested VMX is not
 * compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
12387
/** Opcode 0x66 0x0f 0xc7 !11/6.
 *
 * VMCLEAR - clears the VMCS referenced by the 64-bit memory operand.
 * Decodes the effective address and defers the real work to
 * iemCImpl_vmclear.  \#UD stub when nested VMX is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
12407
/** Opcode 0xf3 0x0f 0xc7 !11/6.
 *
 * VMXON - enters VMX operation using the 64-bit memory operand as the VMXON
 * region pointer.  Decodes the effective address and defers the real work to
 * iemCImpl_vmxon.  \#UD stub when nested VMX is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
12426
/** Opcode [0xf3] 0x0f 0xc7 !11/7.
 *
 * VMPTRST - stores the current VMCS pointer to the 64-bit memory operand.
 * Decodes the effective address and defers the real work to
 * iemCImpl_vmptrst.  \#UD stub when nested VMX is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
12446
/** Opcode 0x0f 0xc7 11/7.
 *
 * RDSEED - stores a hardware entropy value in the selected general register,
 * with CF indicating success.  \#UD unless the guest CPU advertises RDSEED,
 * and register encodings only.  Mirrors iemOp_Grp9_rdrand_Rv. */
FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                /* Use the host RDSEED instruction when available, else a software fallback. */
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
                                         pu16Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
                                         pu32Dst, pEFlags);

                /* 32-bit writes clear the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
                                         pu64Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Register only. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12511
12512/**
12513 * Group 9 jump table for register variant.
12514 */
12515IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12516{ /* pfx: none, 066h, 0f3h, 0f2h */
12517 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12518 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12519 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12520 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12521 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12522 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12523 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12524 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12525};
12526AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12527
12528
12529/**
12530 * Group 9 jump table for memory variant.
 *
 * Same indexing as g_apfnGroup9RegReg: modrm.reg * 4 + prefix-index
 * (none, 066h, 0f3h, 0f2h), see iemOp_Grp9.
12531 */
12532IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12533{   /* pfx:  none, 066h,  0f3h,  0f2h */
12534    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12535    /* /1 */ iemOp_Grp9_cmpxchg8bOr16b,    iemOp_Grp9_cmpxchg8bOr16b,  iemOp_Grp9_cmpxchg8bOr16b,  iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12536    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12537    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12538    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12539    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12540    /* /6 */ iemOp_Grp9_vmptrld_Mq,        iemOp_Grp9_vmclear_Mq,      iemOp_Grp9_vmxon_Mq,        iemOp_InvalidWithRM,
12541    /* /7 */ iemOp_Grp9_vmptrst_Mq,        iemOp_InvalidWithRM,        iemOp_InvalidWithRM,        iemOp_InvalidWithRM,
12542};
12543AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12544
12545
12546/** Opcode 0x0f 0xc7. */
12547FNIEMOP_DEF(iemOp_Grp9)
12548{
12549    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Group 9 splits on the ModRM mode first, then dispatches through the
       reg/mem jump table using modrm.reg * 4 + the operand prefix index. */
12550    if (IEM_IS_MODRM_REG_MODE(bRm))
12551        /* register, register */
12552        return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12553                                                 + pVCpu->iem.s.idxPrefix], bRm);
12554    /* memory, register */
12555    return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12556                                             + pVCpu->iem.s.idxPrefix], bRm);
12557}
12558
12559
12560/**
12561 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register given by @a iReg according to the current
 * effective operand size.  No ModRM byte is decoded here; the caller selects
 * the register from the opcode (plus REX.B).
12562 */
12563FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12564{
12565    switch (pVCpu->iem.s.enmEffOpSize)
12566    {
12567        case IEMMODE_16BIT:
            /* 16-bit form goes through a 32-bit register reference on purpose
               so the high dword is left untouched (see comment below). */
12568            IEM_MC_BEGIN(1, 0);
12569            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12570            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
12571            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
12572            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12573            IEM_MC_ADVANCE_RIP_AND_FINISH();
12574            IEM_MC_END();
12575            break;
12576
12577        case IEMMODE_32BIT:
            /* 32-bit form: swap and (in 64-bit mode) zero the high dword,
               as for any 32-bit GPR write. */
12578            IEM_MC_BEGIN(1, 0);
12579            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12580            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
12581            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12582            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12583            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12584            IEM_MC_ADVANCE_RIP_AND_FINISH();
12585            IEM_MC_END();
12586            break;
12587
12588        case IEMMODE_64BIT:
12589            IEM_MC_BEGIN(1, 0);
12590            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12591            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
12592            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12593            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12594            IEM_MC_ADVANCE_RIP_AND_FINISH();
12595            IEM_MC_END();
12596            break;
12597
12598        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12599    }
12600}
12601
12602
12603/** Opcode 0x0f 0xc8. */
12604FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12605{
12606    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12607    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
12608             prefix.  REX.B is the correct prefix it appears.  For a parallel
12609             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12610    IEMOP_HLP_MIN_486();
    /* Register encoded in the opcode byte, extended by REX.B. */
12611    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12612}
12613
12614
12615/** Opcode 0x0f 0xc9. */
12616FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12617{
12618    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12619    IEMOP_HLP_MIN_486();
    /* Register encoded in the opcode byte, extended by REX.B. */
12620    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12621}
12622
12623
12624/** Opcode 0x0f 0xca. */
12625FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12626{
    /* Fixed stats/mnemonic identifier: was 'bswap_rDX_r9' (copy/paste from the
       0xc9 handler), while the register is rDX/r10 as the display string says. */
12627    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12628    IEMOP_HLP_MIN_486();
    /* Register encoded in the opcode byte, extended by REX.B. */
12629    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12630}
12631
12632
12633/** Opcode 0x0f 0xcb. */
12634FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12635{
    /* Fixed stats/mnemonic identifier: was 'bswap_rBX_r9' (copy/paste from the
       0xc9 handler), while the register is rBX/r11 as the display string says. */
12636    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12637    IEMOP_HLP_MIN_486();
    /* Register encoded in the opcode byte, extended by REX.B. */
12638    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12639}
12640
12641
12642/** Opcode 0x0f 0xcc. */
12643FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12644{
12645    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12646    IEMOP_HLP_MIN_486();
    /* Register encoded in the opcode byte, extended by REX.B. */
12647    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12648}
12649
12650
12651/** Opcode 0x0f 0xcd. */
12652FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12653{
12654    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12655    IEMOP_HLP_MIN_486();
    /* Register encoded in the opcode byte, extended by REX.B. */
12656    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12657}
12658
12659
12660/** Opcode 0x0f 0xce. */
12661FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12662{
12663    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12664    IEMOP_HLP_MIN_486();
    /* Register encoded in the opcode byte, extended by REX.B. */
12665    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12666}
12667
12668
12669/** Opcode 0x0f 0xcf. */
12670FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12671{
12672    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12673    IEMOP_HLP_MIN_486();
    /* Register encoded in the opcode byte, extended by REX.B. */
12674    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12675}
12676
12677
12678/* Opcode 0x0f 0xd0 - invalid */
12679
12680
12681/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12682FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12683{
12684    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decoding and operand fetching done by the common SSE3 FP worker. */
12685    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12686}
12687
12688
12689/* Opcode 0xf3 0x0f 0xd0 - invalid */
12690
12691
12692/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12693FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12694{
12695    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decoding and operand fetching done by the common SSE3 FP worker. */
12696    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12697}
12698
12699
12700
12701/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12702FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12703{
12704    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX shift form; decoding done by the common MMX 'Opt' worker. */
12705    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12706}
12707
12708/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12709FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12710{
12711    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 shift form; decoding done by the common SSE2 'Opt' worker. */
12712    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12713}
12714
12715/* Opcode 0xf3 0x0f 0xd1 - invalid */
12716/* Opcode 0xf2 0x0f 0xd1 - invalid */
12717
12718/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12719FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12720{
12721    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX shift form; decoding done by the common MMX 'Opt' worker. */
12722    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12723}
12724
12725
12726/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12727FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12728{
12729    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 shift form; decoding done by the common SSE2 'Opt' worker. */
12730    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12731}
12732
12733
12734/* Opcode 0xf3 0x0f 0xd2 - invalid */
12735/* Opcode 0xf2 0x0f 0xd2 - invalid */
12736
12737/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12738FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12739{
    /* Consistency fix: flags now match the sibling psrlw/psrld Pq,Qq handlers
       (DISOPTYPE_X86_MMX was missing and the hint differed for no apparent
       reason).  Behavior of the emulation itself is unchanged. */
12740    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX shift form; decoding done by the common MMX 'Opt' worker. */
12741    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12742}
12743
12744
12745/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12746FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12747{
12748    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 shift form; decoding done by the common SSE2 'Opt' worker. */
12749    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12750}
12751
12752
12753/* Opcode 0xf3 0x0f 0xd3 - invalid */
12754/* Opcode 0xf2 0x0f 0xd3 - invalid */
12755
12756
12757/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12758FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12759{
12760    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Note: routed via the SSE2-gated MMX worker (paddq is an SSE2 addition to MMX). */
12761    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12762}
12763
12764
12765/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12766FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12767{
12768    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
12769    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12770}
12771
12772
12773/* Opcode 0xf3 0x0f 0xd4 - invalid */
12774/* Opcode 0xf2 0x0f 0xd4 - invalid */
12775
12776/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12777FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12778{
12779    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
12780    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12781}
12782
12783/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12784FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12785{
12786    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
12787    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12788}
12789
12790
12791/* Opcode 0xf3 0x0f 0xd5 - invalid */
12792/* Opcode 0xf2 0x0f 0xd5 - invalid */
12793
12794/* Opcode 0x0f 0xd6 - invalid */
12795
12796/**
12797 * @opcode      0xd6
12798 * @oppfx       0x66
12799 * @opcpuid     sse2
12800 * @opgroup     og_sse2_pcksclr_datamove
12801 * @opxcpttype  none
12802 * @optest      op1=-1 op2=2 -> op1=2
12803 * @optest      op1=0 op2=-42 -> op1=-42
12804 */
12805FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12806{
12807    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12808    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12809    if (IEM_IS_MODRM_REG_MODE(bRm))
12810    {
12811        /*
12812         * Register, register.
         *
         * Copies the low qword of the source XMM register; the destination
         * XMM register is zero-extended to 128 bits (STORE_..._ZX_U128).
12813         */
12814        IEM_MC_BEGIN(0, 2);
12815        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12816        IEM_MC_LOCAL(uint64_t,                  uSrc);
12817
12818        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12819        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12820
12821        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12822        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12823
12824        IEM_MC_ADVANCE_RIP_AND_FINISH();
12825        IEM_MC_END();
12826    }
12827    else
12828    {
12829        /*
12830         * Memory, register.
         *
         * Low qword of the XMM register goes to memory; effective address is
         * calculated before the decoding-done marker as usual.
12831         */
12832        IEM_MC_BEGIN(0, 2);
12833        IEM_MC_LOCAL(uint64_t,                  uSrc);
12834        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
12835
12836        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12837        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12838        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12839        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12840
12841        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12842        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12843
12844        IEM_MC_ADVANCE_RIP_AND_FINISH();
12845        IEM_MC_END();
12846    }
12847}
12848
12849
12850/**
12851 * @opcode      0xd6
12852 * @opcodesub   11 mr/reg
12853 * @oppfx       f3
12854 * @opcpuid     sse2
12855 * @opgroup     og_sse2_simdint_datamove
12856 * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
12857 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
12858 */
12859FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12860{
12861    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12862    if (IEM_IS_MODRM_REG_MODE(bRm))
12863    {
12864        /*
12865         * Register, register.
         *
         * MMX -> XMM move: reads the MMX register and zero-extends into the
         * XMM register.  Accessing MMX state switches the FPU to MMX mode.
12866         */
12867        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12868        IEM_MC_BEGIN(0, 1);
12869        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12870        IEM_MC_LOCAL(uint64_t,                  uSrc);
12871
12872        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12873        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12874        IEM_MC_FPU_TO_MMX_MODE();
12875
12876        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12877        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12878
12879        IEM_MC_ADVANCE_RIP_AND_FINISH();
12880        IEM_MC_END();
12881    }
12882
12883    /**
12884     * @opdone
12885     * @opmnemonic  udf30fd6mem
12886     * @opcode      0xd6
12887     * @opcodesub   !11 mr/reg
12888     * @oppfx       f3
12889     * @opunused    intel-modrm
12890     * @opcpuid     sse
12891     * @optest      ->
12892     */
12893    else
        /* Memory form is undefined for f3 0f d6; raise #UD after full decode. */
12894        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12895}
12896
12897
12898/**
12899 * @opcode      0xd6
12900 * @opcodesub   11 mr/reg
12901 * @oppfx       f2
12902 * @opcpuid     sse2
12903 * @opgroup     og_sse2_simdint_datamove
12904 * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
12905 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
12906 * @optest      op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12907 * @optest      op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12908 * @optest      op1=-42 op2=0xfedcba9876543210
12909 *                  -> op1=0xfedcba9876543210 ftw=0xff
12910 */
12911FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12912{
12913    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12914    if (IEM_IS_MODRM_REG_MODE(bRm))
12915    {
12916        /*
12917         * Register, register.
         *
         * XMM -> MMX move: low qword of the XMM register is written to the
         * MMX register.  Accessing MMX state switches the FPU to MMX mode.
12918         */
12919        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12920        IEM_MC_BEGIN(0, 1);
12921        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12922        IEM_MC_LOCAL(uint64_t,                  uSrc);
12923
12924        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12925        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12926        IEM_MC_FPU_TO_MMX_MODE();
12927
12928        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
12929        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12930
12931        IEM_MC_ADVANCE_RIP_AND_FINISH();
12932        IEM_MC_END();
12933    }
12934
12935    /**
12936     * @opdone
12937     * @opmnemonic  udf20fd6mem
12938     * @opcode      0xd6
12939     * @opcodesub   !11 mr/reg
12940     * @oppfx       f2
12941     * @opunused    intel-modrm
12942     * @opcpuid     sse
12943     * @optest      ->
12944     */
12945    else
        /* Memory form is undefined for f2 0f d6; raise #UD after full decode. */
12946        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12947}
12948
12949
12950/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12951FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12952{
12953    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12954    /* Docs says register only. */
12955    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12956    {
12957        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12958        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
12959        IEM_MC_BEGIN(2, 0);
12960        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12961        IEM_MC_ARG(uint64_t *,              puDst, 0);
12962        IEM_MC_ARG(uint64_t const *,        puSrc, 1);
12963        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12964        IEM_MC_PREPARE_FPU_USAGE();
12965        IEM_MC_FPU_TO_MMX_MODE();
12966
        /* Destination is referenced as a 64-bit GREG; the assembly helper is
           responsible for what lands in the register (see lazy note above). */
12967        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12968        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12969        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12970
12971        IEM_MC_ADVANCE_RIP_AND_FINISH();
12972        IEM_MC_END();
12973    }
12974    else
        /* Memory form is invalid. */
12975        IEMOP_RAISE_INVALID_OPCODE_RET();
12976}
12977
12978
12979/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12980FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12981{
12982    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12983    /* Docs says register only. */
12984    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12985    {
12986        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12987        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
12988        IEM_MC_BEGIN(2, 0);
12989        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12990        IEM_MC_ARG(uint64_t *,              puDst, 0);
12991        IEM_MC_ARG(PCRTUINT128U,            puSrc, 1);
12992        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12993        IEM_MC_PREPARE_SSE_USAGE();
        /* Destination is referenced as a 64-bit GREG; the assembly helper is
           responsible for what lands in the register (see lazy note above). */
12994        IEM_MC_REF_GREG_U64(puDst,          IEM_GET_MODRM_REG(pVCpu, bRm));
12995        IEM_MC_REF_XREG_U128_CONST(puSrc,   IEM_GET_MODRM_RM(pVCpu, bRm));
12996        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12997        IEM_MC_ADVANCE_RIP_AND_FINISH();
12998        IEM_MC_END();
12999    }
13000    else
        /* Memory form is invalid. */
13001        IEMOP_RAISE_INVALID_OPCODE_RET();
13002}
13003
13004
13005/* Opcode 0xf3 0x0f 0xd7 - invalid */
13006/* Opcode 0xf2 0x0f 0xd7 - invalid */
13007
13008
13009/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13010FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13011{
13012    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
13013    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13014}
13015
13016
13017/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13018FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13019{
13020    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13021    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13022}
13023
13024
13025/* Opcode 0xf3 0x0f 0xd8 - invalid */
13026/* Opcode 0xf2 0x0f 0xd8 - invalid */
13027
13028/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13029FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13030{
13031    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
13032    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13033}
13034
13035
13036/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13037FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13038{
13039    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13040    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13041}
13042
13043
13044/* Opcode 0xf3 0x0f 0xd9 - invalid */
13045/* Opcode 0xf2 0x0f 0xd9 - invalid */
13046
13047/** Opcode 0x0f 0xda - pminub Pq, Qq */
13048FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13049{
13050    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE gated worker (pminub is an SSE addition to MMX). */
13051    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13052}
13053
13054
13055/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13056FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13057{
13058    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13059    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13060}
13061
13062/* Opcode 0xf3 0x0f 0xda - invalid */
13063/* Opcode 0xf2 0x0f 0xda - invalid */
13064
13065/** Opcode 0x0f 0xdb - pand Pq, Qq */
13066FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13067{
13068    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
13069    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13070}
13071
13072
13073/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13074FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13075{
13076    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13077    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13078}
13079
13080
13081/* Opcode 0xf3 0x0f 0xdb - invalid */
13082/* Opcode 0xf2 0x0f 0xdb - invalid */
13083
13084/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13085FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13086{
13087    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
13088    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13089}
13090
13091
13092/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13093FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13094{
13095    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13096    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13097}
13098
13099
13100/* Opcode 0xf3 0x0f 0xdc - invalid */
13101/* Opcode 0xf2 0x0f 0xdc - invalid */
13102
13103/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13104FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13105{
13106    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
13107    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13108}
13109
13110
13111/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13112FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13113{
13114    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13115    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13116}
13117
13118
13119/* Opcode 0xf3 0x0f 0xdd - invalid */
13120/* Opcode 0xf2 0x0f 0xdd - invalid */
13121
13122/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13123FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13124{
13125    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE gated worker (pmaxub is an SSE addition to MMX). */
13126    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13127}
13128
13129
13130/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13131FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13132{
13133    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13134    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13135}
13136
13137/* Opcode 0xf3 0x0f 0xde - invalid */
13138/* Opcode 0xf2 0x0f 0xde - invalid */
13139
13140
13141/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13142FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13143{
13144    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
13145    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13146}
13147
13148
13149/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13150FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13151{
13152    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13153    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13154}
13155
13156
13157/* Opcode 0xf3 0x0f 0xdf - invalid */
13158/* Opcode 0xf2 0x0f 0xdf - invalid */
13159
13160/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13161FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13162{
13163    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE gated 'Opt' worker (pavgb is an SSE addition to MMX). */
13164    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13165}
13166
13167
13168/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13169FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13170{
13171    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 'Opt' worker. */
13172    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13173}
13174
13175
13176/* Opcode 0xf3 0x0f 0xe0 - invalid */
13177/* Opcode 0xf2 0x0f 0xe0 - invalid */
13178
13179/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13180FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13181{
13182    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX shift form; decoding done by the common MMX 'Opt' worker. */
13183    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13184}
13185
13186
13187/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13188FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13189{
13190    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 shift form; decoding done by the common SSE2 'Opt' worker. */
13191    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13192}
13193
13194
13195/* Opcode 0xf3 0x0f 0xe1 - invalid */
13196/* Opcode 0xf2 0x0f 0xe1 - invalid */
13197
13198/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13199FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13200{
13201    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX shift form; decoding done by the common MMX 'Opt' worker. */
13202    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13203}
13204
13205
13206/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13207FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13208{
13209    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 shift form; decoding done by the common SSE2 'Opt' worker. */
13210    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13211}
13212
13213
13214/* Opcode 0xf3 0x0f 0xe2 - invalid */
13215/* Opcode 0xf2 0x0f 0xe2 - invalid */
13216
13217/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13218FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13219{
13220    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE gated 'Opt' worker (pavgw is an SSE addition to MMX). */
13221    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13222}
13223
13224
13225/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13226FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13227{
13228    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 'Opt' worker. */
13229    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13230}
13231
13232
13233/* Opcode 0xf3 0x0f 0xe3 - invalid */
13234/* Opcode 0xf2 0x0f 0xe3 - invalid */
13235
13236/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13237FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13238{
13239    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE gated 'Opt' worker (pmulhuw is an SSE addition to MMX). */
13240    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13241}
13242
13243
13244/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13245FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13246{
13247    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 'Opt' worker. */
13248    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13249}
13250
13251
13252/* Opcode 0xf3 0x0f 0xe4 - invalid */
13253/* Opcode 0xf2 0x0f 0xe4 - invalid */
13254
13255/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13256FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13257{
13258    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
13259    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13260}
13261
13262
13263/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13264FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13265{
13266    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13267    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13268}
13269
13270
13271/* Opcode 0xf3 0x0f 0xe5 - invalid */
13272/* Opcode 0xf2 0x0f 0xe5 - invalid */
13273/* Opcode 0x0f 0xe6 - invalid */
13274
13275
13276/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13277FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13278{
13279    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decoding and MXCSR handling done by the common SSE2 FP worker. */
13280    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13281}
13282
13283
13284/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13285FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13286{
13287    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decoding and MXCSR handling done by the common SSE2 FP worker. */
13288    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13289}
13290
13291
13292/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13293FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13294{
13295    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decoding and MXCSR handling done by the common SSE2 FP worker. */
13296    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13297}
13298
13299
13300/**
13301 * @opcode      0xe7
13302 * @opcodesub   !11 mr/reg
13303 * @oppfx       none
13304 * @opcpuid     sse
13305 * @opgroup     og_sse1_cachect
13306 * @opxcpttype  none
13307 * @optest      op1=-1 op2=2  -> op1=2   ftw=0xff
13308 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
13309 */
13310FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13311{
13312    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13313    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13314    if (IEM_IS_MODRM_MEM_MODE(bRm))
13315    {
13316        /* Register, memory. */
        /* Non-temporal store of the MMX register; emulated as a plain 64-bit
           memory store (no caching hints are modelled).  Touching MMX state
           switches the FPU to MMX mode. */
13317        IEM_MC_BEGIN(0, 2);
13318        IEM_MC_LOCAL(uint64_t,                  uSrc);
13319        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
13320
13321        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13322        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13323        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13324        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13325        IEM_MC_FPU_TO_MMX_MODE();
13326
13327        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13328        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13329
13330        IEM_MC_ADVANCE_RIP_AND_FINISH();
13331        IEM_MC_END();
13332    }
13333    /**
13334     * @opdone
13335     * @opmnemonic  ud0fe7reg
13336     * @opcode      0xe7
13337     * @opcodesub   11 mr/reg
13338     * @oppfx       none
13339     * @opunused    immediate
13340     * @opcpuid     sse
13341     * @optest      ->
13342     */
13343    else
        /* Register form is invalid. */
13344        IEMOP_RAISE_INVALID_OPCODE_RET();
13345}
13346
13347/**
13348 * @opcode      0xe7
13349 * @opcodesub   !11 mr/reg
13350 * @oppfx       0x66
13351 * @opcpuid     sse2
13352 * @opgroup     og_sse2_cachect
13353 * @opxcpttype  1
13354 * @optest      op1=-1 op2=2  -> op1=2
13355 * @optest      op1=0 op2=-42 -> op1=-42
13356 */
13357FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13358{
13359    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13360    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13361    if (IEM_IS_MODRM_MEM_MODE(bRm))
13362    {
13363        /* Register, memory. */
        /* Non-temporal 128-bit store; emulated as an aligned SSE store (the
           caching hint itself is not modelled). */
13364        IEM_MC_BEGIN(0, 2);
13365        IEM_MC_LOCAL(RTUINT128U,                uSrc);
13366        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
13367
13368        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13369        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13370        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13371        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13372
13373        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13374        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13375
13376        IEM_MC_ADVANCE_RIP_AND_FINISH();
13377        IEM_MC_END();
13378    }
13379
13380    /**
13381     * @opdone
13382     * @opmnemonic  ud660fe7reg
13383     * @opcode      0xe7
13384     * @opcodesub   11 mr/reg
13385     * @oppfx       0x66
13386     * @opunused    immediate
13387     * @opcpuid     sse
13388     * @optest      ->
13389     */
13390    else
        /* Register form is invalid. */
13391        IEMOP_RAISE_INVALID_OPCODE_RET();
13392}
13393
13394/* Opcode 0xf3 0x0f 0xe7 - invalid */
13395/* Opcode 0xf2 0x0f 0xe7 - invalid */
13396
13397
13398/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13399FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13400{
13401    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
13402    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13403}
13404
13405
13406/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13407FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13408{
13409    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13410    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13411}
13412
13413
13414/* Opcode 0xf3 0x0f 0xe8 - invalid */
13415/* Opcode 0xf2 0x0f 0xe8 - invalid */
13416
13417/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13418FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13419{
13420    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common MMX full-register worker. */
13421    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13422}
13423
13424
13425/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13426FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13427{
13428    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding done by the common SSE2 full-register worker. */
13429    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13430}
13431
13432
13433/* Opcode 0xf3 0x0f 0xe9 - invalid */
13434/* Opcode 0xf2 0x0f 0xe9 - invalid */
13435
13436
13437/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13438FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13439{
13440    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MMX+SSE gated worker (pminsw is an SSE addition to MMX). */
13441    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13442}
13443
13444
13445/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx (SSE2: minimum of packed signed words, 128-bit). */
13446FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13447{
13448 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13449 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13450}
13451
13452
13453/* Opcode 0xf3 0x0f 0xea - invalid */
13454/* Opcode 0xf2 0x0f 0xea - invalid */
13455
13456
13457/** Opcode 0x0f 0xeb - por Pq, Qq (MMX: bitwise OR). */
13458FNIEMOP_DEF(iemOp_por_Pq_Qq)
13459{
13460 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13461 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13462}
13463
13464
13465/** Opcode 0x66 0x0f 0xeb - por Vx, Wx (SSE2: bitwise OR, 128-bit). */
13466FNIEMOP_DEF(iemOp_por_Vx_Wx)
13467{
13468 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13469 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13470}
13471
13472
13473/* Opcode 0xf3 0x0f 0xeb - invalid */
13474/* Opcode 0xf2 0x0f 0xeb - invalid */
13475
13476/** Opcode 0x0f 0xec - paddsb Pq, Qq (MMX: add packed signed bytes with signed saturation). */
13477FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13478{
13479 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13480 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13481}
13482
13483
13484/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx (SSE2: add packed signed bytes with signed saturation, 128-bit). */
13485FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13486{
13487 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13488 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13489}
13490
13491
13492/* Opcode 0xf3 0x0f 0xec - invalid */
13493/* Opcode 0xf2 0x0f 0xec - invalid */
13494
13495/** Opcode 0x0f 0xed - paddsw Pq, Qq (MMX: add packed signed words with signed saturation). */
13496FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13497{
13498 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13499 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13500}
13501
13502
13503/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx (SSE2: add packed signed words with signed saturation, 128-bit). */
13504FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13505{
13506 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13507 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13508}
13509
13510
13511/* Opcode 0xf3 0x0f 0xed - invalid */
13512/* Opcode 0xf2 0x0f 0xed - invalid */
13513
13514
13515/** Opcode 0x0f 0xee - pmaxsw Pq, Qq (maximum of packed signed words; MMX form, needs SSE or AMD MMX extensions — hence the MmxSse worker). */
13516FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13517{
13518 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13519 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13520}
13521
13522
13523/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx (SSE2: maximum of packed signed words, 128-bit). */
13524FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13525{
13526 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13527 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13528}
13529
13530
13531/* Opcode 0xf3 0x0f 0xee - invalid */
13532/* Opcode 0xf2 0x0f 0xee - invalid */
13533
13534
13535/** Opcode 0x0f 0xef - pxor Pq, Qq (MMX: bitwise XOR). */
13536FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13537{
13538 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13539 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13540}
13541
13542
13543/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx (SSE2: bitwise XOR, 128-bit). */
13544FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13545{
13546 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13547 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13548}
13549
13550
13551/* Opcode 0xf3 0x0f 0xef - invalid */
13552/* Opcode 0xf2 0x0f 0xef - invalid */
13553
13554/* Opcode 0x0f 0xf0 - invalid */
13555/* Opcode 0x66 0x0f 0xf0 - invalid */
13556
13557
13558/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx (SSE3: load 128 bits of unaligned memory into an XMM register). */
13559FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13560{
13561 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13562 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13563 if (IEM_IS_MODRM_REG_MODE(bRm))
13564 {
13565 /*
13566 * Register, register - (not implemented, assuming it raises \#UD).
13567 */
13568 IEMOP_RAISE_INVALID_OPCODE_RET();
13569 }
13570 else
13571 {
13572 /*
13573 * Register, memory.
13574 */
13575 IEM_MC_BEGIN(0, 2);
13576 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13578
 /* Decode the effective address first, then complete decoding gated on the SSE3 feature. */
13579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13581 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13582 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
 /* NOTE(review): plain (non-ALIGN) U128 fetch — presumably no alignment check,
 matching LDDQU's unaligned-load purpose; confirm against the MC definition. */
13583 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13584 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13585
13586 IEM_MC_ADVANCE_RIP_AND_FINISH();
13587 IEM_MC_END();
13588 }
13589}
13590
13591
13592/** Opcode 0x0f 0xf1 - psllw Pq, Qq (MMX: shift packed words left logical by the count in the source operand). */
13593FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13594{
13595 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13596 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13597}
13598
13599
13600/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx (SSE2: shift packed words left logical, 128-bit). */
13601FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13602{
13603 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13604 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13605}
13606
13607
13608/* Opcode 0xf2 0x0f 0xf1 - invalid */
13609
13610/** Opcode 0x0f 0xf2 - pslld Pq, Qq (MMX: shift packed dwords left logical by the count in the source operand). */
13611FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13612{
13613 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13614 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13615}
13616
13617
13618/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx (SSE2: shift packed dwords left logical, 128-bit). */
13619FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13620{
13621 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13622 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13623}
13624
13625
13626/* Opcode 0xf2 0x0f 0xf2 - invalid */
13627
13628/** Opcode 0x0f 0xf3 - psllq Pq, Qq (MMX: shift packed qwords left logical by the count in the source operand). */
13629FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13630{
13631 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13632 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13633}
13634
13635
13636/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx (SSE2: shift packed qwords left logical, 128-bit). */
13637FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13638{
13639 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13640 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13641}
13642
13643/* Opcode 0xf2 0x0f 0xf3 - invalid */
13644
13645/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq (multiply low unsigned dwords to a qword; MMX-register form introduced with SSE2). */
13646FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13647{
13648 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
 /* PMULUDQ mm, mm/m64 requires CPUID.SSE2 (it raises \#UD on MMX-only CPUs),
 so dispatch via the SSE2-checking MMX worker, same as psubq Pq,Qq below. */
13649 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_pmuludq_u64);
13650}
13651
13652
13653/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx (SSE2: multiply low unsigned dwords to qwords, 128-bit). */
13654FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13655{
13656 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13657 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13658}
13659
13660
13661/* Opcode 0xf2 0x0f 0xf4 - invalid */
13662
13663/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq (MMX: multiply packed signed words and add adjacent dword pairs). */
13664FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13665{
13666 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13667 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13668}
13669
13670
13671/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx (SSE2: multiply packed signed words and add adjacent dword pairs, 128-bit). */
13672FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13673{
13674 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13675 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13676}
13677
13678/* Opcode 0xf2 0x0f 0xf5 - invalid */
13679
13680/** Opcode 0x0f 0xf6 - psadbw Pq, Qq (sum of absolute differences of packed unsigned bytes; MMX form, needs SSE or AMD MMX extensions — hence the MmxSse worker). */
13681FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13682{
13683 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13684 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13685}
13686
13687
13688/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx (SSE2: sum of absolute differences of packed unsigned bytes, 128-bit). */
13689FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13690{
13691 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13692 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13693}
13694
13695
13696/* Opcode 0xf2 0x0f 0xf6 - invalid */
13697
13698/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq (byte-masked store of an MMX register; not implemented yet — stub raises on use). */
13699FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13700/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq (byte-masked store of an XMM register; not implemented yet — stub raises on use). */
13701FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13702/* Opcode 0xf2 0x0f 0xf7 - invalid */
13703
13704
13705/** Opcode 0x0f 0xf8 - psubb Pq, Qq (MMX: subtract packed bytes, wraparound). */
13706FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13707{
13708 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13709 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13710}
13711
13712
13713/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx (SSE2: subtract packed bytes, wraparound, 128-bit). */
13714FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13715{
13716 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13717 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13718}
13719
13720
13721/* Opcode 0xf2 0x0f 0xf8 - invalid */
13722
13723
13724/** Opcode 0x0f 0xf9 - psubw Pq, Qq (MMX: subtract packed words, wraparound). */
13725FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13726{
13727 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13728 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13729}
13730
13731
13732/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx (SSE2: subtract packed words, wraparound, 128-bit). */
13733FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13734{
13735 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13736 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13737}
13738
13739
13740/* Opcode 0xf2 0x0f 0xf9 - invalid */
13741
13742
13743/** Opcode 0x0f 0xfa - psubd Pq, Qq (MMX: subtract packed dwords, wraparound). */
13744FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13745{
13746 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13747 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13748}
13749
13750
13751/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx (SSE2: subtract packed dwords, wraparound, 128-bit). */
13752FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13753{
13754 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13755 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13756}
13757
13758
13759/* Opcode 0xf2 0x0f 0xfa - invalid */
13760
13761
13762/** Opcode 0x0f 0xfb - psubq Pq, Qq (subtract packed qwords, wraparound; the MMX-register form was introduced with SSE2, hence the Sse2-checking worker). */
13763FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13764{
13765 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13766 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13767}
13768
13769
13770/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx (SSE2: subtract packed qwords, wraparound, 128-bit). */
13771FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13772{
13773 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13774 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13775}
13776
13777
13778/* Opcode 0xf2 0x0f 0xfb - invalid */
13779
13780
13781/** Opcode 0x0f 0xfc - paddb Pq, Qq (MMX: add packed bytes, wraparound). */
13782FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13783{
13784 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13785 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13786}
13787
13788
13789/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx (SSE2: add packed bytes, wraparound, 128-bit). */
13790FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13791{
13792 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13793 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13794}
13795
13796
13797/* Opcode 0xf2 0x0f 0xfc - invalid */
13798
13799
13800/** Opcode 0x0f 0xfd - paddw Pq, Qq (MMX: add packed words, wraparound). */
13801FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13802{
13803 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13804 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13805}
13806
13807
13808/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx (SSE2: add packed words, wraparound, 128-bit). */
13809FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13810{
13811 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13812 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13813}
13814
13815
13816/* Opcode 0xf2 0x0f 0xfd - invalid */
13817
13818
13819/** Opcode 0x0f 0xfe - paddd Pq, Qq (MMX: add packed dwords, wraparound). */
13820FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13821{
13822 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13823 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13824}
13825
13826
13827/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx (SSE2: add packed dwords, wraparound, 128-bit). */
13828FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13829{
13830 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13831 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13832}
13833
13834
13835/* Opcode 0xf2 0x0f 0xfe - invalid */
13836
13837
13838/** Opcode **** 0x0f 0xff - UD0 (guaranteed-undefined opcode; always raises \#UD). */
13839FNIEMOP_DEF(iemOp_ud0)
13840{
13841 IEMOP_MNEMONIC(ud0, "ud0");
 /* On Intel CPUs UD0 consumes a ModR/M byte (and its memory-operand
 addressing bytes), so decode them before raising \#UD to get the
 instruction length right; other vendors raise \#UD immediately. */
13842 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13843 {
13844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13845#ifndef TST_IEM_CHECK_MC
13846 if (IEM_IS_MODRM_MEM_MODE(bRm))
13847 {
13848 RTGCPTR GCPtrEff;
 /* Effective-address calculation may itself fault/trap; propagate that first. */
13849 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13850 if (rcStrict != VINF_SUCCESS)
13851 return rcStrict;
13852 }
13853#endif
13854 IEMOP_HLP_DONE_DECODING();
13855 }
13856 IEMOP_RAISE_INVALID_OPCODE_RET();
13857}
13858
13859
13860
13861/**
13862 * Two byte opcode map, first byte 0x0f.
13863 *
 * Each opcode row has four handler entries selected by the mandatory
 * prefix, in the order: no prefix, 0x66, 0xf3, 0xf2. The IEMOP_X4
 * macro fills all four columns with the same handler for opcodes that
 * ignore the mandatory prefix. The AssertCompile below pins the table
 * at 256 opcodes x 4 columns = 1024 entries.
 *
13864 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13865 * check if it needs updating as well when making changes.
13866 */
13867IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13868{
13869 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13870 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13871 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13872 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13873 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13874 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13875 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13876 /* 0x06 */ IEMOP_X4(iemOp_clts),
13877 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13878 /* 0x08 */ IEMOP_X4(iemOp_invd),
13879 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13880 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13881 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13882 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13883 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13884 /* 0x0e */ IEMOP_X4(iemOp_femms),
13885 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13886
13887 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13888 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13889 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13890 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13891 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13892 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13893 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13894 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13895 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13896 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13897 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13898 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13899 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13900 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13901 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13902 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13903
13904 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13905 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13906 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13907 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13908 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13909 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13910 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13911 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13912 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13913 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13914 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13915 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13916 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13917 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13918 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13919 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13920
13921 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13922 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13923 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13924 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13925 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13926 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13927 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13928 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13929 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13930 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13931 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13932 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13933 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13934 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13935 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13936 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13937
13938 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13939 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13940 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13941 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13942 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13943 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13944 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13945 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13946 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13947 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13948 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13949 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13950 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13951 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13952 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13953 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13954
13955 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13956 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13957 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13958 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13959 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13960 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13961 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13962 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13963 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13964 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13965 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13966 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13967 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13968 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13969 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13970 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13971
13972 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13973 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13974 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13975 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13976 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13977 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13978 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13979 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13980 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13981 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13982 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13983 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13984 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13985 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13986 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13987 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13988
13989 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13990 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13991 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13992 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13993 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13994 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13995 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13996 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13997
13998 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13999 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14000 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14001 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14002 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14003 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14004 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14005 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14006
14007 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14008 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14009 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14010 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14011 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14012 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14013 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14014 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14015 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14016 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14017 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14018 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14019 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14020 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14021 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14022 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14023
14024 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14025 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14026 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14027 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14028 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14029 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14030 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14031 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14032 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14033 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14034 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14035 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14036 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14037 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14038 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14039 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14040
14041 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14042 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14043 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14044 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14045 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14046 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14047 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14048 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14049 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14050 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14051 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14052 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14053 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14054 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14055 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14056 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14057
14058 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14059 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14060 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14061 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14062 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14063 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14064 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14065 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14066 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14067 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14068 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14069 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14070 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14071 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14072 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14073 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14074
14075 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14076 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14077 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14078 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14079 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14080 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14081 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14082 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14083 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14084 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14085 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14086 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14087 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14088 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14089 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14090 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14091
14092 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14093 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14094 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14095 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14096 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14097 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14098 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14099 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14100 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14101 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14102 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14103 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14104 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14105 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14106 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14107 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14108
14109 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14110 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14111 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14112 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14113 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14114 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14115 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14116 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14117 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14118 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14119 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14120 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14121 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14122 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14123 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14124 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14125
14126 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14127 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14128 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14129 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14130 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14131 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14132 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14133 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14134 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14135 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14136 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14137 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14138 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14139 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14140 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14141 /* 0xff */ IEMOP_X4(iemOp_ud0),
14142};
14143AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
14144
14145/** @} */
14146
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette